You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@camel.apache.org by da...@apache.org on 2020/01/23 05:08:43 UTC

[camel] 02/05: CAMEL-14421: optimize core - ServiceSupport should not have instance logger. Use static logger instead.

This is an automated email from the ASF dual-hosted git repository.

davsclaus pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 2ae0cd8c24b35707d4cc40fd09310977cff57a68
Author: Claus Ibsen <cl...@gmail.com>
AuthorDate: Thu Jan 23 05:38:05 2020 +0100

    CAMEL-14421: optimize core - ServiceSupport should not have instance logger. Use static logger instead.
---
 .../apache/camel/component/ahc/ws/WsEndpoint.java  | 26 ++++---
 .../apache/camel/component/ahc/ws/WsProducer.java  |  7 +-
 .../apache/camel/component/ahc/AhcComponent.java   | 10 ++-
 .../apache/camel/component/ahc/AhcProducer.java    | 25 ++++---
 .../apache/camel/component/as2/AS2Component.java   |  8 ++-
 .../apache/camel/component/as2/AS2Consumer.java    |  6 +-
 .../camel/component/asterisk/AsteriskProducer.java |  6 +-
 .../camel/component/atmos/AtmosEndpoint.java       |  5 --
 .../atmosphere/websocket/WebsocketProducer.java    | 10 +--
 .../client/AbstractAtomixClientProducer.java       | 11 ++-
 .../atomix/client/map/AtomixMapConsumer.java       |  8 ++-
 .../client/messaging/AtomixMessagingConsumer.java  |  7 +-
 .../atomix/client/queue/AtomixQueueConsumer.java   |  6 +-
 .../atomix/client/set/AtomixSetConsumer.java       |  6 +-
 .../atomix/client/value/AtomixValueConsumer.java   |  6 +-
 .../atomix/cluster/AtomixClusterService.java       | 14 ++--
 .../atomix/cluster/AtomixClusterView.java          | 22 +++---
 .../apache/camel/component/aws/cw/CwProducer.java  |  6 +-
 .../camel/component/aws/ddb/DdbEndpoint.java       | 20 +++---
 .../component/aws/ddbstream/DdbStreamConsumer.java | 10 ++-
 .../camel/component/aws/ec2/EC2Producer.java       | 46 ++++++------
 .../camel/component/aws/ecs/ECSProducer.java       | 12 ++--
 .../camel/component/aws/eks/EKSProducer.java       | 11 +--
 .../camel/component/aws/iam/IAMProducer.java       | 29 ++++----
 .../aws/firehose/KinesisFirehoseProducer.java      |  8 ++-
 .../component/aws/kinesis/KinesisConsumer.java     | 17 +++--
 .../camel/component/aws/kms/KMSProducer.java       | 16 +++--
 .../camel/component/aws/lambda/LambdaProducer.java | 40 ++++++-----
 .../apache/camel/component/aws/mq/MQProducer.java  | 16 +++--
 .../camel/component/aws/msk/MSKProducer.java       | 12 ++--
 .../apache/camel/component/aws/s3/S3Consumer.java  | 34 +++++----
 .../apache/camel/component/aws/s3/S3Producer.java  |  2 +-
 .../camel/component/aws/ses/SesProducer.java       | 16 +++--
 .../camel/component/aws/sns/SnsEndpoint.java       | 16 +++--
 .../camel/component/aws/sns/SnsProducer.java       | 10 ++-
 .../camel/component/aws/sqs/SqsConsumer.java       | 42 ++++++-----
 .../camel/component/aws/sqs/SqsEndpoint.java       | 22 +++---
 .../camel/component/aws/sqs/SqsProducer.java       | 22 +++---
 .../component/aws/swf/SWFActivityProducer.java     |  4 +-
 .../component/aws/swf/SWFWorkflowProducer.java     |  4 +-
 .../component/aws/translate/TranslateProducer.java |  6 +-
 .../camel/component/aws/xray/XRayTracer.java       | 45 ++++++------
 .../aws2/translate/Translate2Producer.java         |  5 +-
 .../component/azure/blob/BlobServiceConsumer.java  |  1 -
 .../component/azure/blob/BlobServiceEndpoint.java  |  2 -
 .../component/azure/blob/BlobServiceProducer.java  | 28 ++++----
 .../azure/queue/QueueServiceConsumer.java          |  1 -
 .../azure/queue/QueueServiceEndpoint.java          |  2 -
 .../azure/queue/QueueServiceProducer.java          | 14 ++--
 .../dataformat/barcode/BarcodeDataFormat.java      | 14 ++--
 .../component/bean/AbstractBeanProcessor.java      |  8 ++-
 .../apache/camel/component/bean/BeanComponent.java |  8 ++-
 .../component/beanstalk/BeanstalkConsumer.java     | 16 +++--
 .../camel/blueprint/BlueprintCamelContext.java     | 34 +++++----
 .../apache/camel/component/box/BoxConsumer.java    |  8 ++-
 .../aggregate/CaffeineAggregationRepository.java   | 16 +++--
 .../camel/component/cbor/CBORDataFormat.java       | 13 ++--
 .../cdi/transaction/TransactionErrorHandler.java   | 54 +++++++-------
 .../org/apache/camel/component/cm/CMComponent.java |  1 -
 .../org/apache/camel/component/cm/CMProducer.java  | 23 +++---
 .../apache/camel/component/cmis/CMISConsumer.java  |  6 +-
 .../apache/camel/component/cmis/CMISProducer.java  | 10 ++-
 .../component/controlbus/ControlBusProducer.java   | 20 +++---
 .../component/couchbase/CouchbaseConsumer.java     | 41 ++++++-----
 .../component/couchbase/CouchbaseProducer.java     | 14 ++--
 .../camel/component/couchdb/CouchDbConsumer.java   |  8 ++-
 .../camel/component/couchdb/CouchDbProducer.java   | 16 +++--
 .../component/crypto/cms/CryptoCmsComponent.java   |  8 ++-
 .../camel/component/cxf/CxfBlueprintEndpoint.java  |  6 +-
 .../apache/camel/component/cxf/CxfComponent.java   |  6 +-
 .../apache/camel/component/cxf/CxfConsumer.java    | 26 ++++---
 .../apache/camel/component/cxf/CxfEndpoint.java    | 26 ++++---
 .../apache/camel/component/cxf/CxfProducer.java    | 32 +++++----
 .../component/dataset/DataSetTestEndpoint.java     | 10 ++-
 .../camel/component/direct/DirectProducer.java     |  8 ++-
 .../directvm/DirectVmBlockingProducer.java         |  8 ++-
 .../component/directvm/DirectVmProcessor.java      | 12 ++--
 .../camel/component/directvm/DirectVmProducer.java |  6 +-
 .../component/disruptor/DisruptorProducer.java     | 22 +++---
 .../docker/consumer/DockerEventsConsumer.java      | 10 ++-
 .../docker/producer/AsyncDockerProducer.java       | 63 +++++------------
 .../camel/component/dozer/DozerEndpoint.java       |  6 +-
 .../camel/component/dozer/DozerProducer.java       | 12 ++--
 .../camel/component/drill/DrillProducer.java       |  8 ++-
 .../integration/producer/DropboxDelProducer.java   |  1 -
 .../integration/producer/DropboxMoveProducer.java  |  2 -
 .../aggregate/EhcacheAggregationRepository.java    | 16 +++--
 .../elasticsearch/ElasticsearchProducer.java       | 11 ++-
 .../camel/component/elsql/ElsqlEndpoint.java       |  6 +-
 .../camel/component/elsql/ElsqlProducer.java       | 12 ++--
 .../component/eventadmin/EventAdminConsumer.java   |  6 +-
 .../camel/component/facebook/FacebookConsumer.java | 11 ++-
 .../camel/component/facebook/FacebookEndpoint.java |  9 ++-
 .../component/file/watch/FileWatchConsumer.java    | 10 ++-
 .../apache/camel/component/file/FileConsumer.java  | 31 ++++----
 .../apache/camel/component/file/FileEndpoint.java  | 16 +++--
 .../camel/component/file/GenericFileConsumer.java  | 52 +++++++-------
 .../camel/component/file/GenericFileEndpoint.java  | 24 +++----
 .../component/file/GenericFileOnCompletion.java    | 14 ++--
 .../component/file/GenericFilePollingConsumer.java | 32 +++++----
 .../camel/component/file/GenericFileProducer.java  | 37 +++++-----
 ...dempotentChangedRepositoryReadLockStrategy.java | 20 +++---
 ...IdempotentRenameRepositoryReadLockStrategy.java |  7 +-
 .../FileIdempotentRepositoryReadLockStrategy.java  | 15 ++--
 .../strategy/GenericFileDeleteProcessStrategy.java |  6 +-
 .../GenericFileProcessStrategySupport.java         |  9 ++-
 .../camel/component/file/remote/FtpConsumer.java   | 26 ++++---
 .../camel/component/file/remote/FtpEndpoint.java   | 11 ++-
 .../camel/component/file/remote/FtpsEndpoint.java  | 13 ++--
 .../component/file/remote/RemoteFileConsumer.java  | 49 +++++++------
 .../component/file/remote/RemoteFileEndpoint.java  | 12 ++--
 .../component/file/remote/RemoteFileProducer.java  | 29 ++++----
 .../camel/component/file/remote/SftpConsumer.java  | 24 ++++---
 .../camel/component/ganglia/GangliaProducer.java   | 12 ++--
 .../git/consumer/AbstractGitConsumer.java          |  6 +-
 .../camel/component/git/producer/GitProducer.java  | 54 +++++++-------
 .../google/bigquery/GoogleBigQueryProducer.java    | 18 +++--
 .../bigquery/sql/GoogleBigQuerySQLProducer.java    | 14 ++--
 .../stream/GoogleCalendarStreamConsumer.java       |  7 +-
 .../apache/camel/component/gora/GoraConsumer.java  |  2 -
 .../apache/camel/component/grpc/GrpcConsumer.java  | 14 ++--
 .../apache/camel/component/grpc/GrpcProducer.java  | 12 ++--
 .../guava/eventbus/GuavaEventBusConsumer.java      |  6 +-
 .../guava/eventbus/GuavaEventBusProducer.java      |  8 ++-
 .../hazelcast/queue/HazelcastQueueConsumer.java    |  5 +-
 .../hazelcast/seda/HazelcastSedaConsumer.java      | 18 +++--
 .../camel/component/hbase/HBaseConsumer.java       |  8 ++-
 .../idempotent/HBaseIdempotentRepository.java      | 12 ++--
 .../apache/camel/component/hdfs/HdfsConsumer.java  | 26 ++++---
 .../apache/camel/component/hdfs/HdfsProducer.java  | 37 +++++-----
 .../camel/component/hipchat/HipchatComponent.java  |  6 +-
 .../camel/component/hipchat/HipchatConsumer.java   | 11 ++-
 .../camel/component/hipchat/HipchatProducer.java   | 13 ++--
 .../apache/camel/component/http/HttpComponent.java | 10 ++-
 .../apache/camel/component/http/HttpEndpoint.java  |  8 ++-
 .../apache/camel/component/http/HttpProducer.java  | 17 +++--
 .../hystrix/metrics/HystrixEventStreamService.java | 12 ++--
 .../component/iec60870/AbstractIecComponent.java   | 17 +++--
 .../component/iec60870/client/ClientComponent.java |  2 -
 .../component/iec60870/client/ClientConsumer.java  |  8 ++-
 .../component/iec60870/server/ServerComponent.java |  2 -
 .../component/iec60870/server/ServerConsumer.java  |  6 +-
 .../cache/IgniteCacheContinuousQueryConsumer.java  | 16 +++--
 .../ignite/events/IgniteEventsConsumer.java        | 14 ++--
 .../ignite/events/IgniteEventsEndpoint.java        | 10 ++-
 .../ignite/idgen/IgniteIdGenEndpoint.java          |  6 +-
 .../ignite/messaging/IgniteMessagingConsumer.java  | 14 ++--
 .../component/infinispan/InfinispanComponent.java  |  7 +-
 .../component/infinispan/InfinispanConsumer.java   |  4 +-
 .../camel/component/influxdb/InfluxDbEndpoint.java |  6 +-
 .../camel/component/influxdb/InfluxDbProducer.java | 10 ++-
 .../apache/camel/component/irc/IrcComponent.java   | 24 ++++---
 .../apache/camel/component/irc/IrcConsumer.java    | 10 ++-
 .../apache/camel/component/irc/IrcEndpoint.java    | 16 +++--
 .../apache/camel/component/irc/IrcProducer.java    | 14 ++--
 .../camel/component/ironmq/IronMQConsumer.java     | 22 +++---
 .../camel/component/ironmq/IronMQEndpoint.java     |  6 +-
 .../camel/component/ironmq/IronMQProducer.java     |  8 ++-
 .../camel/component/jackson/JacksonDataFormat.java | 24 ++++---
 .../component/jacksonxml/JacksonXMLDataFormat.java | 14 ++--
 .../camel/converter/jaxb/JaxbDataFormat.java       | 22 +++---
 .../camel/component/jcache/JCacheConsumer.java     |  4 +-
 .../aggregate/JCacheAggregationRepository.java     | 22 +++---
 .../jclouds/JcloudsBlobStoreConsumer.java          |  1 -
 .../jclouds/JcloudsBlobStoreProducer.java          |  8 ++-
 .../apache/camel/component/jcr/JcrConsumer.java    | 26 ++++---
 .../apache/camel/component/jdbc/JdbcComponent.java |  6 +-
 .../apache/camel/component/jdbc/JdbcProducer.java  | 18 +++--
 .../camel/component/jetty/JettyHttpComponent.java  | 17 +++--
 .../component/jetty/async/MyAsyncProducer.java     | 13 ++--
 .../jgroups/raft/JGroupsRaftProducer.java          | 12 ++--
 .../camel/component/jgroups/JGroupsConsumer.java   |  8 ++-
 .../camel/component/jgroups/JGroupsEndpoint.java   | 10 ++-
 .../camel/component/jgroups/JGroupsProducer.java   | 12 ++--
 .../apache/camel/component/jms/JmsComponent.java   |  9 ++-
 .../apache/camel/component/jms/JmsConsumer.java    | 19 +++--
 .../apache/camel/component/jms/JmsEndpoint.java    | 20 +++---
 .../apache/camel/component/jms/JmsProducer.java    | 54 +++++++-------
 .../camel/component/jms/async/MyAsyncProducer.java | 14 ++--
 .../apache/camel/component/jmx/JMXConsumer.java    | 22 +++---
 .../apache/camel/component/jpa/JpaComponent.java   | 26 ++++---
 .../apache/camel/component/jpa/JpaConsumer.java    | 30 ++++----
 .../camel/component/jpa/JpaPollingConsumer.java    | 10 ++-
 .../apache/camel/component/jpa/JpaProducer.java    | 10 ++-
 .../idempotent/jpa/JpaMessageIdRepository.java     | 20 +++---
 .../apache/camel/component/scp/ScpEndpoint.java    |  2 +-
 .../component/jt400/Jt400DataQueueConsumer.java    | 12 ++--
 .../camel/component/kafka/KafkaConsumer.java       | 68 +++++++++---------
 .../camel/component/kafka/KafkaEndpoint.java       |  6 +-
 .../camel/component/kafka/KafkaProducer.java       | 24 ++++---
 .../config_maps/KubernetesConfigMapsConsumer.java  |  8 ++-
 .../config_maps/KubernetesConfigMapsProducer.java  | 16 +++--
 .../deployments/KubernetesDeploymentsConsumer.java |  8 ++-
 .../deployments/KubernetesDeploymentsProducer.java | 22 +++---
 .../kubernetes/hpa/KubernetesHPAConsumer.java      |  8 ++-
 .../kubernetes/hpa/KubernetesHPAProducer.java      | 20 +++---
 .../kubernetes/job/KubernetesJobProducer.java      | 20 +++---
 .../namespaces/KubernetesNamespacesConsumer.java   |  8 ++-
 .../kubernetes/nodes/KubernetesNodesConsumer.java  |  8 ++-
 .../kubernetes/nodes/KubernetesNodesProducer.java  | 12 ++--
 .../kubernetes/pods/KubernetesPodsConsumer.java    |  8 ++-
 .../KubernetesReplicationControllersConsumer.java  |  8 ++-
 .../services/KubernetesServicesConsumer.java       |  8 ++-
 .../OpenshiftBuildConfigsProducer.java             |  8 ++-
 .../openshift/builds/OpenshiftBuildsProducer.java  |  8 ++-
 .../camel/component/language/LanguageProducer.java |  6 +-
 .../apache/camel/component/ldap/LdapProducer.java  | 17 +++--
 .../apache/camel/component/ldif/LdifProducer.java  | 37 +++++-----
 .../leveldb/LevelDBAggregationRepository.java      | 46 ++++++------
 .../apache/camel/component/log/LogComponent.java   |  7 +-
 .../apache/camel/component/mail/MailConsumer.java  | 82 ++++++++++++----------
 .../apache/camel/component/mail/MailProducer.java  |  8 ++-
 .../camel/component/master/MasterConsumer.java     |  8 +--
 .../camel/component/metrics/GaugeProducer.java     |  9 ++-
 .../camel/component/metrics/HistogramProducer.java |  7 +-
 .../camel/component/metrics/MetricsComponent.java  | 14 ++--
 .../camel/component/metrics/TimerProducer.java     | 11 ++-
 .../component/micrometer/MicrometerComponent.java  |  6 +-
 .../camel/component/micrometer/TimerProducer.java  |  9 ++-
 .../component/milo/client/MiloClientComponent.java |  8 ++-
 .../component/milo/client/MiloClientConsumer.java  |  9 ++-
 .../component/milo/client/MiloClientProducer.java  |  2 -
 .../component/milo/server/MiloServerComponent.java | 15 ++--
 .../component/milo/server/MiloServerProducer.java  |  6 +-
 .../apache/camel/component/mllp/MllpEndpoint.java  |  2 -
 .../apache/camel/component/mock/MockEndpoint.java  | 40 ++++++-----
 .../component/mongodb/gridfs/GridFsEndpoint.java   | 10 ++-
 .../camel/component/mongodb/MongoDbEndpoint.java   | 15 ++--
 .../camel/component/mongodb/MongoDbProducer.java   | 16 +++--
 .../component/mybatis/MyBatisBeanProducer.java     | 18 +++--
 .../camel/component/mybatis/MyBatisConsumer.java   | 10 ++-
 .../component/nagios/NagiosEventNotifier.java      | 13 ++--
 .../camel/component/nagios/NagiosProducer.java     | 11 ++-
 .../apache/camel/component/nats/NatsConsumer.java  | 18 +++--
 .../apache/camel/component/nats/NatsProducer.java  | 19 ++---
 .../component/netty/http/NettyHttpComponent.java   | 10 ++-
 .../component/netty/http/NettyHttpEndpoint.java    | 10 ++-
 .../component/netty/http/NettyHttpProducer.java    |  8 ++-
 .../camel/component/netty/NettyComponent.java      |  6 +-
 .../camel/component/netty/NettyConsumer.java       | 12 ++--
 .../camel/component/netty/NettyProducer.java       | 78 ++++++++++----------
 .../camel/opentracing/OpenTracingTracer.java       | 31 ++++----
 .../component/optaplanner/OptaPlannerConsumer.java |  4 +-
 .../apache/camel/component/paho/PahoConsumer.java  | 22 +++---
 .../apache/camel/component/paho/PahoProducer.java  | 10 ++-
 .../component/paxlogging/PaxLoggingConsumer.java   |  8 ++-
 .../apache/camel/component/pdf/PdfProducer.java    | 11 ++-
 .../slot/PgReplicationSlotConsumer.java            | 12 ++--
 .../slot/PgReplicationSlotEndpoint.java            |  6 +-
 .../camel/component/pgevent/PgEventConsumer.java   |  8 ++-
 .../camel/component/pgevent/PgEventEndpoint.java   | 14 ++--
 .../camel/component/printer/PrinterProducer.java   |  9 ++-
 .../camel/component/pubnub/PubNubConsumer.java     |  9 ++-
 .../camel/component/pubnub/PubNubProducer.java     | 22 +++---
 .../camel/component/pulsar/PulsarProducer.java     |  8 ++-
 .../camel/component/quartz/QuartzComponent.java    | 36 +++++-----
 .../camel/component/quartz/QuartzEndpoint.java     | 29 ++++----
 .../QuartzScheduledPollConsumerScheduler.java      | 22 +++---
 .../component/quickfixj/QuickfixjComponent.java    |  8 ++-
 .../component/quickfixj/QuickfixjConsumer.java     |  6 +-
 .../component/quickfixj/QuickfixjEndpoint.java     |  8 ++-
 .../camel/component/quickfixj/QuickfixjEngine.java | 12 ++--
 .../component/quickfixj/QuickfixjProducer.java     |  8 ++-
 .../examples/trading/TradeExecutorComponent.java   |  2 +-
 .../camel/component/rabbitmq/RabbitConsumer.java   | 40 ++++++-----
 .../component/rabbitmq/RabbitMQComponent.java      | 12 ++--
 .../camel/component/rabbitmq/RabbitMQConsumer.java | 21 +++---
 .../camel/component/rabbitmq/RabbitMQProducer.java | 35 +++++----
 .../rabbitmq/reply/ReplyManagerSupport.java        | 39 +++++-----
 .../rabbitmq/reply/TemporaryQueueReplyManager.java | 14 ++--
 .../reactive/streams/ReactiveStreamsConsumer.java  |  6 +-
 .../apache/camel/component/rest/RestEndpoint.java  | 15 ++--
 .../component/salesforce/SalesforceComponent.java  | 15 ++--
 .../component/salesforce/SalesforceConsumer.java   | 16 +++--
 .../component/salesforce/SalesforceEndpoint.java   | 12 ++--
 .../component/salesforce/SalesforceProducer.java   |  6 +-
 .../component/sap/netweaver/NetWeaverProducer.java | 11 ++-
 .../camel/component/xquery/XQueryEndpoint.java     |  6 +-
 .../component/scheduler/SchedulerConsumer.java     |  8 ++-
 .../component/schematron/SchematronEndpoint.java   | 15 ++--
 .../component/schematron/SchematronProducer.java   | 20 ++----
 .../apache/camel/component/seda/SedaConsumer.java  | 36 +++++-----
 .../apache/camel/component/seda/SedaEndpoint.java  | 16 +++--
 .../apache/camel/component/seda/SedaProducer.java  | 33 +++++----
 .../camel/component/service/ServiceConsumer.java   |  4 +-
 .../camel/component/servlet/ServletComponent.java  |  6 +-
 .../apache/camel/component/sjms/SjmsConsumer.java  | 10 ++-
 .../apache/camel/component/sjms/SjmsProducer.java  | 34 +++++----
 .../component/sjms/batch/SjmsBatchConsumer.java    | 14 ++--
 .../component/sjms/producer/InOutProducer.java     | 22 +++---
 .../camel/component/slack/SlackConsumer.java       |  8 +--
 .../apache/camel/component/smpp/SmppConsumer.java  | 24 ++++---
 .../apache/camel/component/smpp/SmppProducer.java  | 24 ++++---
 .../apache/camel/component/snmp/SnmpEndpoint.java  |  7 +-
 .../apache/camel/component/snmp/SnmpOIDPoller.java | 26 ++++---
 .../apache/camel/component/snmp/SnmpProducer.java  | 12 ++--
 .../camel/component/snmp/SnmpTrapConsumer.java     | 32 +++++----
 .../camel/component/snmp/SnmpTrapProducer.java     | 16 +++--
 .../camel/dataformat/soap/SoapJaxbDataFormat.java  | 12 ++--
 .../apache/camel/component/solr/SolrComponent.java | 12 ++--
 .../component/SoroushBotAbstractConsumer.java      | 32 +++++----
 .../component/SoroushBotDownloadFileProducer.java  |  3 -
 .../component/SoroushBotMultiThreadConsumer.java   | 13 ++--
 .../component/SoroushBotSendMessageProducer.java   | 10 +--
 .../component/SoroushBotSingleThreadConsumer.java  | 15 ++--
 .../camel/component/sparkrest/SparkConsumer.java   |  8 ++-
 .../camel/component/splunk/SplunkConsumer.java     | 12 ++--
 .../camel/component/splunk/SplunkEndpoint.java     |  6 +-
 .../spring/batch/SpringBatchProducer.java          |  6 +-
 .../integration/SpringIntegrationConsumer.java     |  7 +-
 .../integration/SpringIntegrationProducer.java     | 11 ++-
 .../spring/ws/SpringWebserviceComponent.java       |  8 ++-
 .../spring/ws/SpringWebserviceProducer.java        | 10 ++-
 .../spring/spi/SpringManagementMBeanAssembler.java | 10 ++-
 .../camel/spring/spi/TransactionErrorHandler.java  | 52 +++++++-------
 .../camel/spring/StartAndStopEventNotifier.java    |  8 ++-
 .../apache/camel/component/sql/SqlComponent.java   |  6 +-
 .../apache/camel/component/sql/SqlConsumer.java    | 14 ++--
 .../apache/camel/component/sql/SqlProducer.java    | 11 ++-
 .../camel/component/stream/StreamConsumer.java     | 18 +++--
 .../camel/component/stream/StreamEndpoint.java     |  6 +-
 .../camel/component/stream/StreamProducer.java     | 22 +++---
 .../camel/component/telegram/TelegramConsumer.java | 14 ++--
 .../camel/component/telegram/TelegramProducer.java | 14 ++--
 .../apache/camel/test/junit4/CamelTestSupport.java | 38 +++++-----
 .../camel/test/junit4/LanguageTestSupport.java     |  6 +-
 .../camel/component/thrift/ThriftConsumer.java     | 20 +++---
 .../camel/component/thrift/ThriftProducer.java     | 20 +++---
 .../camel/component/timer/TimerConsumer.java       | 13 ++--
 .../directmessage/DirectMessageProducer.java       |  6 +-
 .../component/twitter/search/SearchProducer.java   |  8 ++-
 .../component/twitter/timeline/UserProducer.java   | 12 ++--
 .../camel/component/undertow/UndertowConsumer.java |  9 ++-
 .../camel/component/undertow/UndertowEndpoint.java |  6 +-
 .../camel/component/undertow/UndertowProducer.java | 12 ++--
 .../camel/component/vertx/VertxComponent.java      | 20 +++---
 .../camel/component/vertx/VertxConsumer.java       | 18 +++--
 .../camel/component/vertx/VertxProducer.java       | 10 ++-
 .../camel/component/weather/WeatherConsumer.java   | 10 ++-
 .../camel/component/weather/WeatherProducer.java   |  8 ++-
 .../camel/component/webhook/WebhookEndpoint.java   |  8 ++-
 .../component/websocket/WebsocketProducer.java     | 16 +++--
 .../apache/camel/component/xmpp/XmppComponent.java |  8 ++-
 .../apache/camel/component/xmpp/XmppConsumer.java  | 28 ++++----
 .../apache/camel/component/xmpp/XmppEndpoint.java  | 18 +++--
 .../component/xslt/saxon/XsltSaxonEndpoint.java    | 11 ++-
 .../apache/camel/component/xslt/XsltComponent.java |  6 +-
 .../apache/camel/component/xslt/XsltEndpoint.java  | 15 ++--
 .../zookeepermaster/policy/MasterRoutePolicy.java  | 14 ++--
 .../component/zookeeper/ZooKeeperConsumer.java     | 24 ++++---
 .../component/zookeeper/ZooKeeperProducer.java     | 56 ++++++++-------
 351 files changed, 3274 insertions(+), 2092 deletions(-)

diff --git a/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsEndpoint.java b/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsEndpoint.java
index 27f66d8..184ed1c 100644
--- a/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsEndpoint.java
+++ b/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsEndpoint.java
@@ -33,6 +33,8 @@ import org.asynchttpclient.DefaultAsyncHttpClientConfig;
 import org.asynchttpclient.ws.WebSocket;
 import org.asynchttpclient.ws.WebSocketListener;
 import org.asynchttpclient.ws.WebSocketUpgradeHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * To exchange data with external Websocket servers using <a href="http://github.com/sonatype/async-http-client">Async Http Client</a>.
@@ -41,6 +43,8 @@ import org.asynchttpclient.ws.WebSocketUpgradeHandler;
         syntax = "ahc-ws:httpUri", label = "websocket")
 public class WsEndpoint extends AhcEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(WsEndpoint.class);
+
     private final Set<WsConsumer> consumers = new HashSet<>();
     private final WsListener listener = new WsListener();
     private transient WebSocket websocket;
@@ -120,7 +124,7 @@ public class WsEndpoint extends AhcEndpoint {
     public void connect() throws Exception {
         String uri = getHttpUri().toASCIIString();
 
-        log.debug("Connecting to {}", uri);
+        LOG.debug("Connecting to {}", uri);
         websocket = getClient().prepareGet(uri).execute(
             new WebSocketUpgradeHandler.Builder()
                 .addWebSocketListener(listener).build()).get();
@@ -129,8 +133,8 @@ public class WsEndpoint extends AhcEndpoint {
     @Override
     protected void doStop() throws Exception {
         if (websocket != null && websocket.isOpen()) {
-            if (log.isDebugEnabled()) {
-                log.debug("Disconnecting from {}", getHttpUri().toASCIIString());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Disconnecting from {}", getHttpUri().toASCIIString());
             }
             websocket.removeWebSocketListener(listener);
             websocket.sendCloseFrame();
@@ -151,7 +155,7 @@ public class WsEndpoint extends AhcEndpoint {
     void reConnect() throws Exception {
         if (websocket == null || !websocket.isOpen()) {
             String uri = getHttpUri().toASCIIString();
-            log.info("Reconnecting websocket: {}", uri);
+            LOG.info("Reconnecting websocket: {}", uri);
             connect();
         }
     }
@@ -160,16 +164,16 @@ public class WsEndpoint extends AhcEndpoint {
 
         @Override
         public void onOpen(WebSocket websocket) {
-            log.debug("Websocket opened");
+            LOG.debug("Websocket opened");
         }
 
         @Override
         public void onClose(WebSocket websocket, int code, String reason) {
-            log.debug("websocket closed - reconnecting");
+            LOG.debug("websocket closed - reconnecting");
             try {
                 reConnect();
             } catch (Exception e) {
-                log.warn("Error re-connecting to websocket", e);
+                LOG.warn("Error re-connecting to websocket", e);
                 ExceptionHandler exceptionHandler = getExceptionHandler();
                 if (exceptionHandler != null) {
                     exceptionHandler.handleException("Error re-connecting to websocket", e);
@@ -179,7 +183,7 @@ public class WsEndpoint extends AhcEndpoint {
 
         @Override
         public void onError(Throwable t) {
-            log.debug("websocket on error", t);
+            LOG.debug("websocket on error", t);
             if (isSendMessageOnError()) {
                 for (WsConsumer consumer : consumers) {
                     consumer.sendMessage(t);
@@ -189,7 +193,7 @@ public class WsEndpoint extends AhcEndpoint {
 
         @Override
         public void onBinaryFrame(byte[] message, boolean finalFragment, int rsv) {
-            log.debug("Received message --> {}", message);
+            LOG.debug("Received message --> {}", message);
             for (WsConsumer consumer : consumers) {
                 consumer.sendMessage(message);
             }
@@ -197,7 +201,7 @@ public class WsEndpoint extends AhcEndpoint {
 
         @Override
         public void onTextFrame(String message, boolean finalFragment, int rsv) {
-            log.debug("Received message --> {}", message);
+            LOG.debug("Received message --> {}", message);
             for (WsConsumer consumer : consumers) {
                 consumer.sendMessage(message);
             }
@@ -205,7 +209,7 @@ public class WsEndpoint extends AhcEndpoint {
 
         @Override
         public void onPingFrame(byte[] payload) {
-            log.debug("Received ping --> {}", payload);
+            LOG.debug("Received ping --> {}", payload);
             websocket.sendPongFrame(payload);
         }
     }
diff --git a/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsProducer.java b/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsProducer.java
index 716fa81..812b9d0 100644
--- a/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsProducer.java
+++ b/components/camel-ahc-ws/src/main/java/org/apache/camel/component/ahc/ws/WsProducer.java
@@ -23,11 +23,16 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.asynchttpclient.ws.WebSocket;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
  */
 public class WsProducer extends DefaultProducer {
+
+    private static final Logger LOG = LoggerFactory.getLogger(WsProducer.class);
+
     private static final int DEFAULT_STREAM_BUFFER_SIZE = 127;
     
     private int streamBufferSize = DEFAULT_STREAM_BUFFER_SIZE;
@@ -46,7 +51,7 @@ public class WsProducer extends DefaultProducer {
         Message in = exchange.getIn();
         Object message = in.getBody();
         if (message != null) {
-            log.debug("Sending out {}", message);
+            LOG.debug("Sending out {}", message);
             if (message instanceof String) {
                 sendMessage(getWebSocket(), (String)message, getEndpoint().isUseStreaming());
             } else if (message instanceof byte[]) {
diff --git a/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcComponent.java b/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcComponent.java
index 5cfd740..09083c8 100644
--- a/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcComponent.java
+++ b/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcComponent.java
@@ -37,13 +37,17 @@ import org.asynchttpclient.Realm;
 import org.asynchttpclient.Realm.Builder;
 import org.asynchttpclient.cookie.CookieStore;
 import org.asynchttpclient.cookie.ThreadSafeCookieStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *  To call external HTTP services using <a href="http://github.com/sonatype/async-http-client">Async Http Client</a>
  */
 @Component("ahc")
 public class AhcComponent extends HeaderFilterStrategyComponent implements SSLContextParametersAware {
-    
+
+    private static final Logger LOG = LoggerFactory.getLogger(AhcComponent.class);
+
     private static final String CLIENT_CONFIG_PREFIX = "clientConfig.";
     private static final String CLIENT_REALM_CONFIG_PREFIX = "clientConfig.realm.";
 
@@ -88,12 +92,12 @@ public class AhcComponent extends HeaderFilterStrategyComponent implements SSLCo
                     ? new DefaultAsyncHttpClientConfig.Builder() : AhcComponent.cloneConfig(endpoint.getClientConfig());
             
             if (endpoint.getClient() != null) {
-                log.warn("The user explicitly set an AsyncHttpClient instance on the component or "
+                LOG.warn("The user explicitly set an AsyncHttpClient instance on the component or "
                          + "endpoint, but this endpoint URI contains client configuration parameters.  "
                          + "Are you sure that this is what was intended?  The AsyncHttpClient will be used"
                          + " and the URI parameters will be ignored.");
             } else if (endpoint.getClientConfig() != null) {
-                log.warn("The user explicitly set an AsyncHttpClientConfig instance on the component or "
+                LOG.warn("The user explicitly set an AsyncHttpClientConfig instance on the component or "
                          + "endpoint, but this endpoint URI contains client configuration parameters.  "
                          + "Are you sure that this is what was intended?  The URI parameters will be applied"
                          + " to a clone of the supplied AsyncHttpClientConfig in order to prevent unintended modification"
diff --git a/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcProducer.java b/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcProducer.java
index 265e6d1..3e5e783 100644
--- a/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcProducer.java
+++ b/components/camel-ahc/src/main/java/org/apache/camel/component/ahc/AhcProducer.java
@@ -27,12 +27,15 @@ import org.asynchttpclient.AsyncHttpClient;
 import org.asynchttpclient.HttpResponseBodyPart;
 import org.asynchttpclient.HttpResponseStatus;
 import org.asynchttpclient.Request;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
  */
 public class AhcProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AhcProducer.class);
     private final AsyncHttpClient client;
 
     public AhcProducer(AhcEndpoint endpoint) {
@@ -50,7 +53,7 @@ public class AhcProducer extends DefaultAsyncProducer {
         try {
             // AHC supports async processing
             Request request = getEndpoint().getBinding().prepareRequest(getEndpoint(), exchange);
-            log.debug("Executing request {}", request);
+            LOG.debug("Executing request {}", request);
             client.executeRequest(request, new AhcAsyncHandler(exchange, callback, request.getUrl(), getEndpoint().getBufferSize()));
             return false;
         } catch (Exception e) {
@@ -82,8 +85,8 @@ public class AhcProducer extends DefaultAsyncProducer {
 
         @Override
         public void onThrowable(Throwable t) {
-            if (log.isTraceEnabled()) {
-                log.trace("{} onThrowable {}", exchange.getExchangeId(), t);
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("{} onThrowable {}", exchange.getExchangeId(), t);
             }
             try {
                 getEndpoint().getBinding().onThrowable(getEndpoint(), exchange, t);
@@ -96,8 +99,8 @@ public class AhcProducer extends DefaultAsyncProducer {
 
         @Override
         public Exchange onCompleted() throws Exception {
-            if (log.isTraceEnabled()) {
-                log.trace("{} onCompleted", exchange.getExchangeId());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("{} onCompleted", exchange.getExchangeId());
             }
             try {
                 getEndpoint().getBinding().onComplete(getEndpoint(), exchange, url, os, contentLength, statusCode, statusText);
@@ -120,8 +123,8 @@ public class AhcProducer extends DefaultAsyncProducer {
             throws Exception {
             // write body parts to stream, which we will bind to the Camel Exchange in onComplete
             os.write(bodyPart.getBodyPartBytes());
-            if (log.isTraceEnabled()) {
-                log.trace("{} onBodyPartReceived {} bytes", exchange.getExchangeId(), bodyPart.length());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("{} onBodyPartReceived {} bytes", exchange.getExchangeId(), bodyPart.length());
             }
             contentLength += bodyPart.length();
             return State.CONTINUE;
@@ -130,8 +133,8 @@ public class AhcProducer extends DefaultAsyncProducer {
         @Override
         public State onStatusReceived(HttpResponseStatus responseStatus)
             throws Exception {
-            if (log.isTraceEnabled()) {
-                log.trace("{} onStatusReceived {}", exchange.getExchangeId(), responseStatus);
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("{} onStatusReceived {}", exchange.getExchangeId(), responseStatus);
             }
             try {
                 statusCode = responseStatus.getStatusCode();
@@ -145,8 +148,8 @@ public class AhcProducer extends DefaultAsyncProducer {
 
         @Override
         public State onHeadersReceived(HttpHeaders headers) throws Exception {
-            if (log.isTraceEnabled()) {
-                log.trace("{} onHeadersReceived {}", exchange.getExchangeId(), headers);
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("{} onHeadersReceived {}", exchange.getExchangeId(), headers);
             }
             try {
                 getEndpoint().getBinding().onHeadersReceived(getEndpoint(), exchange, headers);
diff --git a/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Component.java b/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Component.java
index 3c85b34..48ca689 100644
--- a/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Component.java
+++ b/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Component.java
@@ -25,13 +25,17 @@ import org.apache.camel.component.as2.internal.AS2ApiName;
 import org.apache.camel.spi.annotations.Component;
 import org.apache.camel.support.component.AbstractApiComponent;
 import org.bouncycastle.jce.provider.BouncyCastleProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents the component that manages {@link AS2Endpoint}.
  */
 @Component("as2")
 public class AS2Component extends AbstractApiComponent<AS2ApiName, AS2Configuration, AS2ApiCollection> {
-    
+
+    private static final Logger LOG = LoggerFactory.getLogger(AS2Component.class);
+
     public AS2Component() {
         super(AS2Endpoint.class, AS2ApiName.class, AS2ApiCollection.getCollection());
     }
@@ -65,7 +69,7 @@ public class AS2Component extends AbstractApiComponent<AS2ApiName, AS2Configurat
     protected void doStart() throws Exception {
         super.doStart();
         if (Security.getProvider("BC") == null) {
-            log.debug("Adding BouncyCastleProvider as security provider");
+            LOG.debug("Adding BouncyCastleProvider as security provider");
             Security.addProvider(new BouncyCastleProvider());
         }
     }
diff --git a/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Consumer.java b/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Consumer.java
index 4d57c9f..f918f3d 100644
--- a/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Consumer.java
+++ b/components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Consumer.java
@@ -41,12 +41,16 @@ import org.apache.http.HttpResponse;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.protocol.HttpCoreContext;
 import org.apache.http.protocol.HttpRequestHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The AS2 consumer.
  */
 public class AS2Consumer extends AbstractApiConsumer<AS2ApiName, AS2Configuration> implements HttpRequestHandler {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AS2Consumer.class);
+
     private static final String HANDLER_PROPERTY = "handler";
     private static final String REQUEST_URI_PROPERTY = "requestUri";
 
@@ -127,7 +131,7 @@ public class AS2Consumer extends AbstractApiConsumer<AS2ApiName, AS2Configuratio
                 exception = exchange.getException();
             }
         } catch (Exception e) {
-            log.info("Failed to process AS2 message", e);
+            LOG.info("Failed to process AS2 message", e);
             exception = e;
         }
         
diff --git a/components/camel-asterisk/src/main/java/org/apache/camel/component/asterisk/AsteriskProducer.java b/components/camel-asterisk/src/main/java/org/apache/camel/component/asterisk/AsteriskProducer.java
index d6673bb..b346f6d 100644
--- a/components/camel-asterisk/src/main/java/org/apache/camel/component/asterisk/AsteriskProducer.java
+++ b/components/camel-asterisk/src/main/java/org/apache/camel/component/asterisk/AsteriskProducer.java
@@ -25,12 +25,16 @@ import org.asteriskjava.manager.AuthenticationFailedException;
 import org.asteriskjava.manager.TimeoutException;
 import org.asteriskjava.manager.action.ManagerAction;
 import org.asteriskjava.manager.response.ManagerResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Asterisk producer.
  */
 public class AsteriskProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AsteriskProducer.class);
+
     private final AsteriskEndpoint endpoint;
     private final AsteriskConnection connection;
 
@@ -67,7 +71,7 @@ public class AsteriskProducer extends DefaultProducer {
         // Action must be set
         ObjectHelper.notNull(action, "action");
 
-        log.debug("Send action {}", action);
+        LOG.debug("Send action {}", action);
 
         ManagerAction managerAction = action.apply(exchange);
         ManagerResponse managerResponse = connection.sendAction(managerAction);
diff --git a/components/camel-atmos/src/main/java/org/apache/camel/component/atmos/AtmosEndpoint.java b/components/camel-atmos/src/main/java/org/apache/camel/component/atmos/AtmosEndpoint.java
index a9c0e1c..61a74fb 100644
--- a/components/camel-atmos/src/main/java/org/apache/camel/component/atmos/AtmosEndpoint.java
+++ b/components/camel-atmos/src/main/java/org/apache/camel/component/atmos/AtmosEndpoint.java
@@ -65,8 +65,6 @@ public class AtmosEndpoint extends DefaultEndpoint {
      */
     @Override
     public Producer createProducer() throws Exception {
-        log.debug("resolve producer atmos endpoint {{}}", configuration.getOperation());
-        log.debug("resolve producer atmos attached client: {}", configuration.getClient());
         if (configuration.getOperation() == AtmosOperation.put) {
             return new AtmosPutProducer(this, configuration);
         } else if (this.configuration.getOperation() == AtmosOperation.del) {
@@ -88,9 +86,6 @@ public class AtmosEndpoint extends DefaultEndpoint {
      */
     @Override
     public Consumer createConsumer(Processor processor) throws Exception {
-        log.debug("resolve consumer atmos endpoint {{}}", configuration.getOperation());
-        log.debug("resolve consumer atmos attached client:{}", configuration.getClient());
-
         AtmosScheduledPollConsumer consumer;
         if (this.configuration.getOperation() == AtmosOperation.get) {
             consumer = new AtmosScheduledPollGetConsumer(this, processor, configuration);
diff --git a/components/camel-atmosphere-websocket/src/main/java/org/apache/camel/component/atmosphere/websocket/WebsocketProducer.java b/components/camel-atmosphere-websocket/src/main/java/org/apache/camel/component/atmosphere/websocket/WebsocketProducer.java
index a8a0203..ff9d664 100644
--- a/components/camel-atmosphere-websocket/src/main/java/org/apache/camel/component/atmosphere/websocket/WebsocketProducer.java
+++ b/components/camel-atmosphere-websocket/src/main/java/org/apache/camel/component/atmosphere/websocket/WebsocketProducer.java
@@ -75,9 +75,9 @@ public class WebsocketProducer extends DefaultProducer {
             message = in.getBody(byte[].class);
         }
 
-        log.debug("Sending to {}", message);
+        LOG.debug("Sending to {}", message);
         if (getEndpoint().isSendToAll()) {
-            log.debug("Sending to all -> {}", message);
+            LOG.debug("Sending to all -> {}", message);
             //TODO consider using atmosphere's broadcast or a more configurable async send
             for (final WebSocket websocket : getEndpoint().getWebSocketStore().getAllWebSockets()) {
                 sendMessage(websocket, message);
@@ -99,12 +99,12 @@ public class WebsocketProducer extends DefaultProducer {
         List<String> notValidConnectionKeys = new ArrayList<>();
 
         for (final String connectionKey : connectionKeyList) {
-            log.debug("Sending to connection key {} -> {}", connectionKey, message);
+            LOG.debug("Sending to connection key {} -> {}", connectionKey, message);
             sendMessage(getWebSocket(connectionKey, notValidConnectionKeys), message);
         }
 
         if (!notValidConnectionKeys.isEmpty()) {
-            log.debug("Some connections have not received the message {}",  message);
+            LOG.debug("Some connections have not received the message {}",  message);
             getEndpoint().getWebsocketConsumer().sendNotDeliveredMessage(notValidConnectionKeys, message);
         }
     }
@@ -140,7 +140,7 @@ public class WebsocketProducer extends DefaultProducer {
             if (websocket == null) {
                 //collect for call back to handle not sent message(s) to guaranty delivery
                 notValidConnectionKeys.add(connectionKey);
-                log.debug("Failed to send message to single connection; connetion key is not valid. {}",  connectionKey);
+                LOG.debug("Failed to send message to single connection; connection key is not valid. {}",  connectionKey);
             }
         }
         return websocket;
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/AbstractAtomixClientProducer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/AbstractAtomixClientProducer.java
index c2c1978..47086dd 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/AbstractAtomixClientProducer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/AbstractAtomixClientProducer.java
@@ -31,6 +31,9 @@ import org.apache.camel.component.atomix.AtomixAsyncMessageProcessor;
 import org.apache.camel.spi.InvokeOnHeader;
 import org.apache.camel.support.DefaultAsyncProducer;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.atomix.client.AtomixClientConstants.RESOURCE_ACTION_HAS_RESULT;
 import static org.apache.camel.component.atomix.client.AtomixClientConstants.RESOURCE_NAME;
@@ -38,6 +41,8 @@ import static org.apache.camel.support.ObjectHelper.invokeMethodSafe;
 
 public abstract class AbstractAtomixClientProducer<E extends AbstractAtomixClientEndpoint, R extends Resource> extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractAtomixClientProducer.class);
+
     private final Map<String, AtomixAsyncMessageProcessor> processors;
     private ConcurrentMap<String, R> resources;
 
@@ -133,8 +138,10 @@ public abstract class AbstractAtomixClientProducer<E extends AbstractAtomixClien
                 throw new IllegalArgumentException("Second argument should be of type AsyncCallback");
             }
 
-            log.debug("bind key={}, class={}, method={}",
-                annotation.value(), this.getClass(), method.getName());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("bind key={}, class={}, method={}",
+                        annotation.value(), this.getClass(), method.getName());
+            }
 
             this.processors.put(annotation.value(), (m, c) -> (boolean)invokeMethodSafe(method, this, m, c));
         } else {
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/map/AtomixMapConsumer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/map/AtomixMapConsumer.java
index 429699e..abf16c9 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/map/AtomixMapConsumer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/map/AtomixMapConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.component.atomix.client.AbstractAtomixClientConsumer;
 import org.apache.camel.component.atomix.client.AtomixClientConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class AtomixMapConsumer extends AbstractAtomixClientConsumer<AtomixMapEndpoint> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixMapConsumer.class);
+
     private final List<Listener<DistributedMap.EntryEvent<Object, Object>>> listeners;
     private final String resourceName;
     private final String resultHeader;
@@ -55,12 +59,12 @@ public final class AtomixMapConsumer extends AbstractAtomixClientConsumer<Atomix
 
         Object key = getAtomixEndpoint().getConfiguration().getKey();
         if (key == null) {
-            log.debug("Subscribe to events for map: {}", resourceName);
+            LOG.debug("Subscribe to events for map: {}", resourceName);
             this.listeners.add(this.map.onAdd(this::onEvent).join());
             this.listeners.add(this.map.onRemove(this::onEvent).join());
             this.listeners.add(this.map.onUpdate(this::onEvent).join());
         } else {
-            log.debug("Subscribe to events for map: {}, key: {}", resourceName, key);
+            LOG.debug("Subscribe to events for map: {}, key: {}", resourceName, key);
             this.listeners.add(this.map.onAdd(key, this::onEvent).join());
             this.listeners.add(this.map.onRemove(key, this::onEvent).join());
             this.listeners.add(this.map.onUpdate(key, this::onEvent).join());
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/messaging/AtomixMessagingConsumer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/messaging/AtomixMessagingConsumer.java
index 34590ab..f4beedd 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/messaging/AtomixMessagingConsumer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/messaging/AtomixMessagingConsumer.java
@@ -29,6 +29,9 @@ import org.apache.camel.Processor;
 import org.apache.camel.component.atomix.client.AbstractAtomixClientConsumer;
 import org.apache.camel.component.atomix.client.AtomixClientConstants;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.atomix.client.AtomixClientConstants.CHANNEL_NAME;
 import static org.apache.camel.component.atomix.client.AtomixClientConstants.MEMBER_NAME;
@@ -36,6 +39,8 @@ import static org.apache.camel.component.atomix.client.AtomixClientConstants.RES
 
 public final class AtomixMessagingConsumer extends AbstractAtomixClientConsumer<AtomixMessagingEndpoint> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixMessagingConsumer.class);
+
     private final List<Listener<Message<Object>>> listeners;
     private final String resultHeader;
     private final String groupName;
@@ -71,7 +76,7 @@ public final class AtomixMessagingConsumer extends AbstractAtomixClientConsumer<
         this.localMember = group.join(memberName).join();
         this.consumer = localMember.messaging().consumer(channelName);
 
-        log.debug("Subscribe to group: {}, member: {}, channel: {}", groupName, memberName, channelName);
+        LOG.debug("Subscribe to group: {}, member: {}, channel: {}", groupName, memberName, channelName);
         this.listeners.add(consumer.onMessage(this::onMessage));
     }
 
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/queue/AtomixQueueConsumer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/queue/AtomixQueueConsumer.java
index aa19c44..88e2315 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/queue/AtomixQueueConsumer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/queue/AtomixQueueConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.component.atomix.client.AbstractAtomixClientConsumer;
 import org.apache.camel.component.atomix.client.AtomixClientConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class AtomixQueueConsumer extends AbstractAtomixClientConsumer<AtomixQueueEndpoint> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixQueueConsumer.class);
+
     private final List<Listener<DistributedQueue.ValueEvent<Object>>> listeners;
     private final String resourceName;
     private final String resultHeader;
@@ -53,7 +57,7 @@ public final class AtomixQueueConsumer extends AbstractAtomixClientConsumer<Atom
             .join();
 
 
-        log.debug("Subscribe to events for queue: {}", resourceName);
+        LOG.debug("Subscribe to events for queue: {}", resourceName);
         this.listeners.add(this.queue.onAdd(this::onEvent).join());
         this.listeners.add(this.queue.onRemove(this::onEvent).join());
     }
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/set/AtomixSetConsumer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/set/AtomixSetConsumer.java
index 9687258..1942d63 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/set/AtomixSetConsumer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/set/AtomixSetConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.component.atomix.client.AbstractAtomixClientConsumer;
 import org.apache.camel.component.atomix.client.AtomixClientConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class AtomixSetConsumer extends AbstractAtomixClientConsumer<AtomixSetEndpoint> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixSetConsumer.class);
+
     private final List<Listener<DistributedSet.ValueEvent<Object>>> listeners;
     private final String resourceName;
     private final String resultHeader;
@@ -53,7 +57,7 @@ public final class AtomixSetConsumer extends AbstractAtomixClientConsumer<Atomix
             .join();
 
 
-        log.debug("Subscribe to events for set: {}", resourceName);
+        LOG.debug("Subscribe to events for set: {}", resourceName);
         this.listeners.add(this.set.onAdd(this::onEvent).join());
         this.listeners.add(this.set.onRemove(this::onEvent).join());
     }
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/value/AtomixValueConsumer.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/value/AtomixValueConsumer.java
index d39d691..5347929 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/value/AtomixValueConsumer.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/client/value/AtomixValueConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.component.atomix.client.AbstractAtomixClientConsumer;
 import org.apache.camel.component.atomix.client.AtomixClientConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class AtomixValueConsumer extends AbstractAtomixClientConsumer<AtomixValueEndpoint> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixValueConsumer.class);
+
     private final List<Listener<DistributedValue.ChangeEvent<Object>>> listeners;
     private final String resourceName;
     private final String resultHeader;
@@ -53,7 +57,7 @@ public final class AtomixValueConsumer extends AbstractAtomixClientConsumer<Atom
             .join();
 
 
-        log.debug("Subscribe to events for value: {}", resourceName);
+        LOG.debug("Subscribe to events for value: {}", resourceName);
         this.listeners.add(this.value.onChange(this::onEvent).join());
     }
 
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterService.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterService.java
index a959d9f..e3acabf 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterService.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterService.java
@@ -24,9 +24,13 @@ import io.atomix.copycat.server.storage.StorageLevel;
 import org.apache.camel.CamelContext;
 import org.apache.camel.support.cluster.AbstractCamelClusterService;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class AtomixClusterService extends AbstractCamelClusterService<AtomixClusterView> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixClusterService.class);
+
     private Address address;
     private AtomixClusterConfiguration configuration;
     private AtomixReplica atomix;
@@ -140,7 +144,7 @@ public final class AtomixClusterService extends AbstractCamelClusterService<Atom
         super.doStop();
 
         if (atomix != null) {
-            log.debug("Leaving atomix cluster replica {}", atomix);
+            LOG.debug("Leaving atomix cluster replica {}", atomix);
             atomix.leave().join();
         }
     }
@@ -160,13 +164,13 @@ public final class AtomixClusterService extends AbstractCamelClusterService<Atom
             atomix = AtomixClusterHelper.createReplica(getCamelContext(), address, configuration);
 
             if (ObjectHelper.isNotEmpty(configuration.getNodes())) {
-                log.debug("Bootstrap cluster on address {} for nodes: {}", address, configuration.getNodes());
+                LOG.debug("Bootstrap cluster on address {} for nodes: {}", address, configuration.getNodes());
                 this.atomix.bootstrap(configuration.getNodes()).join();
-                log.debug("Bootstrap cluster done");
+                LOG.debug("Bootstrap cluster done");
             } else {
-                log.debug("Bootstrap cluster on address {}", address);
+                LOG.debug("Bootstrap cluster on address {}", address);
                 this.atomix.bootstrap().join();
-                log.debug("Bootstrap cluster done");
+                LOG.debug("Bootstrap cluster done");
             }
         }
 
diff --git a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterView.java b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterView.java
index 9ed1027..eb97a9d 100644
--- a/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterView.java
+++ b/components/camel-atomix/src/main/java/org/apache/camel/component/atomix/cluster/AtomixClusterView.java
@@ -31,9 +31,13 @@ import org.apache.camel.cluster.CamelClusterService;
 import org.apache.camel.component.atomix.AtomixConfiguration;
 import org.apache.camel.support.cluster.AbstractCamelClusterView;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 final class AtomixClusterView extends AbstractCamelClusterView {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AtomixClusterView.class);
+
     private final Atomix atomix;
     private final AtomixLocalMember localMember;
     private final AtomixConfiguration<?> configuration;
@@ -81,7 +85,7 @@ final class AtomixClusterView extends AbstractCamelClusterView {
     @Override
     protected void doStart() throws Exception {
         if (!localMember.hasJoined()) {
-            log.debug("Get group {}", getNamespace());
+            LOG.debug("Get group {}", getNamespace());
 
             group = this.atomix.getGroup(
                 getNamespace(),
@@ -89,28 +93,28 @@ final class AtomixClusterView extends AbstractCamelClusterView {
                 new DistributedGroup.Options(configuration.getResourceOptions(getNamespace()))
             ).get();
 
-            log.debug("Listen election events");
+            LOG.debug("Listen election events");
             group.election().onElection(term -> {
                 if (isRunAllowed()) {
                     fireLeadershipChangedEvent(Optional.of(toClusterMember(term.leader())));
                 }
             });
 
-            log.debug("Listen join events");
+            LOG.debug("Listen join events");
             group.onJoin(member -> {
                 if (isRunAllowed()) {
                     fireMemberAddedEvent(toClusterMember(member));
                 }
             });
 
-            log.debug("Listen leave events");
+            LOG.debug("Listen leave events");
             group.onLeave(member -> {
                 if (isRunAllowed()) {
                     fireMemberRemovedEvent(toClusterMember(member));
                 }
             });
 
-            log.debug("Join group {}", getNamespace());
+            LOG.debug("Join group {}", getNamespace());
             localMember.join();
         }
     }
@@ -175,11 +179,11 @@ final class AtomixClusterView extends AbstractCamelClusterView {
             if (member == null && group != null) {
                 String id = getClusterService().getId();
                 if (ObjectHelper.isEmpty(id) || configuration.isEphemeral()) {
-                    log.debug("Joining group: {}", group);
+                    LOG.debug("Joining group: {}", group);
                     member = group.join().join();
-                    log.debug("Group {} joined with id {}", group, member.id());
+                    LOG.debug("Group {} joined with id {}", group, member.id());
                 } else {
-                    log.debug("Joining group: {}, with id: {}", group, id);
+                    LOG.debug("Joining group: {}, with id: {}", group, id);
                     member = group.join(id).join();
                 }
             }
@@ -191,7 +195,7 @@ final class AtomixClusterView extends AbstractCamelClusterView {
             if (member != null) {
                 String id = member.id();
 
-                log.debug("Member {} : leave group {}", id, group);
+                LOG.debug("Member {} : leave group {}", id, group);
 
                 member.leave().join();
                 group.remove(id).join();
diff --git a/components/camel-aws-cw/src/main/java/org/apache/camel/component/aws/cw/CwProducer.java b/components/camel-aws-cw/src/main/java/org/apache/camel/component/aws/cw/CwProducer.java
index 28b6ae0..340dd69 100644
--- a/components/camel-aws-cw/src/main/java/org/apache/camel/component/aws/cw/CwProducer.java
+++ b/components/camel-aws-cw/src/main/java/org/apache/camel/component/aws/cw/CwProducer.java
@@ -32,12 +32,16 @@ import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the AWS CloudWatch Service
  */
 public class CwProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CwProducer.class);
+
     private transient String cwProducerToString;
     
     public CwProducer(Endpoint endpoint) {
@@ -52,7 +56,7 @@ public class CwProducer extends DefaultProducer {
                 .withMetricData(metricData)
                 .withNamespace(determineNameSpace(exchange));
 
-        log.info("Sending request [{}] from exchange [{}]...", request, exchange);
+        LOG.info("Sending request [{}] from exchange [{}]...", request, exchange);
         getEndpoint().getCloudWatchClient().putMetricData(request);
     }
 
diff --git a/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddb/DdbEndpoint.java b/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddb/DdbEndpoint.java
index d691148..c1ccc4e 100644
--- a/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddb/DdbEndpoint.java
+++ b/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddb/DdbEndpoint.java
@@ -40,6 +40,8 @@ import org.apache.camel.spi.UriEndpoint;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.ScheduledPollEndpoint;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The aws-ddb component is used for storing and retrieving data from Amazon's DynamoDB service.
@@ -47,6 +49,8 @@ import org.apache.camel.util.ObjectHelper;
 @UriEndpoint(firstVersion = "2.10.0", scheme = "aws-ddb", title = "AWS DynamoDB", syntax = "aws-ddb:tableName", producerOnly = true, label = "cloud,database,nosql")
 public class DdbEndpoint extends ScheduledPollEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DdbEndpoint.class);
+
     @UriParam
     private DdbConfiguration configuration;
 
@@ -75,7 +79,7 @@ public class DdbEndpoint extends ScheduledPollEndpoint {
             : createDdbClient();
         
         String tableName = getConfiguration().getTableName();
-        log.trace("Querying whether table [{}] already exists...", tableName);
+        LOG.trace("Querying whether table [{}] already exists...", tableName);
 
         try {
             DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
@@ -84,17 +88,17 @@ public class DdbEndpoint extends ScheduledPollEndpoint {
                 waitForTableToBecomeAvailable(tableName);
             }
 
-            log.trace("Table [{}] already exists", tableName);
+            LOG.trace("Table [{}] already exists", tableName);
             return;
         } catch (ResourceNotFoundException e) {
-            log.trace("Table [{}] doesn't exist yet", tableName);
-            log.trace("Creating table [{}]...", tableName);
+            LOG.trace("Table [{}] doesn't exist yet", tableName);
+            LOG.trace("Creating table [{}]...", tableName);
             TableDescription tableDescription = createTable(tableName);
             if (!isTableActive(tableDescription)) {
                 waitForTableToBecomeAvailable(tableName);
             }
 
-            log.trace("Table [{}] created", tableName);
+            LOG.trace("Table [{}] created", tableName);
         }
     }
     
@@ -163,7 +167,7 @@ public class DdbEndpoint extends ScheduledPollEndpoint {
     }
 
     private void waitForTableToBecomeAvailable(String tableName) {
-        log.trace("Waiting for [{}] to become ACTIVE...", tableName);
+        LOG.trace("Waiting for [{}] to become ACTIVE...", tableName);
 
         long waitTime = 5 * 60 * 1000;
         while (waitTime > 0) {
@@ -176,10 +180,10 @@ public class DdbEndpoint extends ScheduledPollEndpoint {
                 DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
                 TableDescription tableDescription = getDdbClient().describeTable(request).getTable();
                 if (isTableActive(tableDescription)) {
-                    log.trace("Table [{}] became active", tableName);
+                    LOG.trace("Table [{}] became active", tableName);
                     return;
                 }
-                log.trace("Table [{}] not active yet", tableName);
+                LOG.trace("Table [{}] not active yet", tableName);
             } catch (AmazonServiceException ase) {
                 if (!ase.getErrorCode().equalsIgnoreCase("ResourceNotFoundException")) {
                     throw ase;
diff --git a/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddbstream/DdbStreamConsumer.java b/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddbstream/DdbStreamConsumer.java
index 899e45e..6831e3a 100644
--- a/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddbstream/DdbStreamConsumer.java
+++ b/components/camel-aws-ddb/src/main/java/org/apache/camel/component/aws/ddbstream/DdbStreamConsumer.java
@@ -32,9 +32,13 @@ import org.apache.camel.Processor;
 import org.apache.camel.support.ScheduledBatchPollingConsumer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class DdbStreamConsumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DdbStreamConsumer.class);
+
     private final ShardIteratorHandler shardIteratorHandler;
     private String lastSeenSequenceNumber;
 
@@ -56,7 +60,7 @@ public class DdbStreamConsumer extends ScheduledBatchPollingConsumer {
                         .withLimit(getEndpoint().getConfiguration().getMaxResultsPerRequest());
             result = getClient().getRecords(req);
         } catch (ExpiredIteratorException e) {
-            log.warn("Expired Shard Iterator, attempting to resume from {}", lastSeenSequenceNumber, e);
+            LOG.warn("Expired Shard Iterator, attempting to resume from {}", lastSeenSequenceNumber, e);
             GetRecordsRequest req = new GetRecordsRequest()
                         .withShardIterator(shardIteratorHandler.getShardIterator(lastSeenSequenceNumber))
                         .withLimit(getEndpoint().getConfiguration().getMaxResultsPerRequest());
@@ -81,11 +85,11 @@ public class DdbStreamConsumer extends ScheduledBatchPollingConsumer {
         while (!exchanges.isEmpty()) {
             final Exchange exchange = ObjectHelper.cast(Exchange.class, exchanges.poll());
 
-            log.trace("Processing exchange [{}] started.", exchange);
+            LOG.trace("Processing exchange [{}] started.", exchange);
             getAsyncProcessor().process(exchange, new AsyncCallback() {
                 @Override
                 public void done(boolean doneSync) {
-                    log.trace("Processing exchange [{}] done.", exchange);
+                    LOG.trace("Processing exchange [{}] done.", exchange);
                 }
             });
             processedExchanges++;
diff --git a/components/camel-aws-ec2/src/main/java/org/apache/camel/component/aws/ec2/EC2Producer.java b/components/camel-aws-ec2/src/main/java/org/apache/camel/component/aws/ec2/EC2Producer.java
index 371b2d0..0f74a12 100644
--- a/components/camel-aws-ec2/src/main/java/org/apache/camel/component/aws/ec2/EC2Producer.java
+++ b/components/camel-aws-ec2/src/main/java/org/apache/camel/component/aws/ec2/EC2Producer.java
@@ -50,13 +50,17 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon EC2 Service
  * <a href="http://aws.amazon.com/ec2/">AWS EC2</a>
  */
 public class EC2Producer extends DefaultProducer {
-    
+
+    private static final Logger LOG = LoggerFactory.getLogger(EC2Producer.class);
+
     private transient String ec2ProducerToString;
 
     public EC2Producer(Endpoint endpoint) {
@@ -193,10 +197,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.runInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Run Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Run Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Creating and running instances with ami [{}] and instance type {}", ami, instanceType);
+        LOG.trace("Creating and running instances with ami [{}] and instance type {}", ami, instanceType);
         Message message = getMessageForResponse(exchange);
         message.setBody(result);
     }
@@ -214,10 +218,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.startInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Start Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Start Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Starting instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Starting instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result);        
     }
@@ -235,10 +239,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.stopInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Stop Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Stop Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Stopping instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Stopping instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result);        
     }
@@ -256,10 +260,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.terminateInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Terminate Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Terminate Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Terminating instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Terminating instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result);        
     }
@@ -275,7 +279,7 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.describeInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Describe Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -293,7 +297,7 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.describeInstanceStatus(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Describe Instances Status command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Instances Status command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -310,10 +314,10 @@ public class EC2Producer extends DefaultProducer {
             throw new IllegalArgumentException("Instances Ids must be specified");
         }
         try {
-            log.trace("Rebooting instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+            LOG.trace("Rebooting instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
             ec2Client.rebootInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Reboot Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Reboot Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
     }
@@ -331,10 +335,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.monitorInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Monitor Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Monitor Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Start Monitoring instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Start Monitoring instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result); 
     }
@@ -352,10 +356,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.unmonitorInstances(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Unmonitor Instances command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Unmonitor Instances command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Stop Monitoring instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Stop Monitoring instances with Ids [{}] ", Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result); 
     }
@@ -380,10 +384,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.createTags(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create tags command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create tags command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Created tags [{}] on resources with Ids [{}] ", Arrays.toString(tags.toArray()), Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Created tags [{}] on resources with Ids [{}] ", Arrays.toString(tags.toArray()), Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result); 
     }
@@ -408,10 +412,10 @@ public class EC2Producer extends DefaultProducer {
         try {
             result = ec2Client.deleteTags(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete tags command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete tags command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
-        log.trace("Delete tags [{}] on resources with Ids [{}] ", Arrays.toString(tags.toArray()), Arrays.toString(instanceIds.toArray()));
+        LOG.trace("Delete tags [{}] on resources with Ids [{}] ", Arrays.toString(tags.toArray()), Arrays.toString(instanceIds.toArray()));
         Message message = getMessageForResponse(exchange);
         message.setBody(result); 
     }
diff --git a/components/camel-aws-ecs/src/main/java/org/apache/camel/component/aws/ecs/ECSProducer.java b/components/camel-aws-ecs/src/main/java/org/apache/camel/component/aws/ecs/ECSProducer.java
index deff48e..1a983df 100644
--- a/components/camel-aws-ecs/src/main/java/org/apache/camel/component/aws/ecs/ECSProducer.java
+++ b/components/camel-aws-ecs/src/main/java/org/apache/camel/component/aws/ecs/ECSProducer.java
@@ -32,6 +32,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon ECS Service
@@ -39,6 +41,8 @@ import org.apache.camel.util.URISupport;
  */
 public class ECSProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ECSProducer.class);
+
     private transient String ecsProducerToString;
 
     public ECSProducer(Endpoint endpoint) {
@@ -100,7 +104,7 @@ public class ECSProducer extends DefaultProducer {
         try {
             result = ecsClient.listClusters();
         } catch (AmazonServiceException ase) {
-            log.trace("List Clusters command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Clusters command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -117,7 +121,7 @@ public class ECSProducer extends DefaultProducer {
         try {
             result = ecsClient.createCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -134,7 +138,7 @@ public class ECSProducer extends DefaultProducer {
         try {
             result = ecsClient.describeClusters(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Describe Clusters command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Clusters command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -153,7 +157,7 @@ public class ECSProducer extends DefaultProducer {
         try {
             result = ecsClient.deleteCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-eks/src/main/java/org/apache/camel/component/aws/eks/EKSProducer.java b/components/camel-aws-eks/src/main/java/org/apache/camel/component/aws/eks/EKSProducer.java
index b501ab3..a1e5dcf 100644
--- a/components/camel-aws-eks/src/main/java/org/apache/camel/component/aws/eks/EKSProducer.java
+++ b/components/camel-aws-eks/src/main/java/org/apache/camel/component/aws/eks/EKSProducer.java
@@ -33,6 +33,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon EKS Service
@@ -40,6 +42,7 @@ import org.apache.camel.util.URISupport;
  */
 public class EKSProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(EKSProducer.class);
     private transient String eksProducerToString;
 
     public EKSProducer(Endpoint endpoint) {
@@ -101,7 +104,7 @@ public class EKSProducer extends DefaultProducer {
         try {
             result = eksClient.listClusters(request);
         } catch (AmazonServiceException ase) {
-            log.trace("List Clusters command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Clusters command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -126,7 +129,7 @@ public class EKSProducer extends DefaultProducer {
         try {
             result = eksClient.createCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -145,7 +148,7 @@ public class EKSProducer extends DefaultProducer {
         try {
             result = eksClient.describeCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Describe Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -164,7 +167,7 @@ public class EKSProducer extends DefaultProducer {
         try {
             result = eksClient.deleteCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-iam/src/main/java/org/apache/camel/component/aws/iam/IAMProducer.java b/components/camel-aws-iam/src/main/java/org/apache/camel/component/aws/iam/IAMProducer.java
index f62ff92..b1708fa 100644
--- a/components/camel-aws-iam/src/main/java/org/apache/camel/component/aws/iam/IAMProducer.java
+++ b/components/camel-aws-iam/src/main/java/org/apache/camel/component/aws/iam/IAMProducer.java
@@ -48,6 +48,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon IAM Service
@@ -55,6 +57,7 @@ import org.apache.camel.util.URISupport;
  */
 public class IAMProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IAMProducer.class);
     private transient String iamProducerToString;
 
     public IAMProducer(Endpoint endpoint) {
@@ -138,7 +141,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.listAccessKeys();
         } catch (AmazonServiceException ase) {
-            log.trace("List Access Keys command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Access Keys command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -157,7 +160,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.createUser(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create user command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create user command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -176,7 +179,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.deleteUser(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete user command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete user command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -195,7 +198,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.getUser(request);
         } catch (AmazonServiceException ase) {
-            log.trace("get user command returned the error code {}", ase.getErrorCode());
+            LOG.trace("get user command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -207,7 +210,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.listUsers();
         } catch (AmazonServiceException ase) {
-            log.trace("List users command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List users command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -224,7 +227,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.createAccessKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Access Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Access Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -247,7 +250,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.deleteAccessKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Access Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Access Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -276,7 +279,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.updateAccessKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Update Access Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Update Access Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -299,7 +302,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.createGroup(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Group command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Group command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -318,7 +321,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.deleteGroup(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Group command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Group command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -330,7 +333,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.listGroups();
         } catch (AmazonServiceException ase) {
-            log.trace("List Groups command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Groups command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -355,7 +358,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.addUserToGroup(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Add User To Group command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Add User To Group command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -380,7 +383,7 @@ public class IAMProducer extends DefaultProducer {
         try {
             result = iamClient.removeUserFromGroup(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Remove User From Group command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Remove User From Group command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/firehose/KinesisFirehoseProducer.java b/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/firehose/KinesisFirehoseProducer.java
index 9ebec00..deece9a 100644
--- a/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/firehose/KinesisFirehoseProducer.java
+++ b/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/firehose/KinesisFirehoseProducer.java
@@ -24,9 +24,13 @@ import com.amazonaws.services.kinesisfirehose.model.Record;
 import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class KinesisFirehoseProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(KinesisFirehoseProducer.class);
+
     public KinesisFirehoseProducer(KinesisFirehoseEndpoint endpoint) {
         super(endpoint);
     }
@@ -39,9 +43,9 @@ public class KinesisFirehoseProducer extends DefaultProducer {
     @Override
     public void process(Exchange exchange) throws Exception {
         PutRecordRequest request = createRequest(exchange);
-        log.trace("Sending request [{}] from exchange [{}]...", request, exchange);
+        LOG.trace("Sending request [{}] from exchange [{}]...", request, exchange);
         PutRecordResult putRecordResult = getEndpoint().getClient().putRecord(request);
-        log.trace("Received result [{}]", putRecordResult);
+        LOG.trace("Received result [{}]", putRecordResult);
         Message message = getMessageForResponse(exchange);
         message.setHeader(KinesisFirehoseConstants.RECORD_ID, putRecordResult.getRecordId());
     }
diff --git a/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/kinesis/KinesisConsumer.java b/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/kinesis/KinesisConsumer.java
index 2a500ff..9a47456 100644
--- a/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/kinesis/KinesisConsumer.java
+++ b/components/camel-aws-kinesis/src/main/java/org/apache/camel/component/aws/kinesis/KinesisConsumer.java
@@ -17,7 +17,6 @@
 package org.apache.camel.component.aws.kinesis;
 
 import java.util.ArrayDeque;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Queue;
 
@@ -37,9 +36,13 @@ import org.apache.camel.Processor;
 import org.apache.camel.support.ScheduledBatchPollingConsumer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class KinesisConsumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(KinesisConsumer.class);
+
     private String currentShardIterator;
     private boolean isShardClosed;
 
@@ -64,12 +67,12 @@ public class KinesisConsumer extends ScheduledBatchPollingConsumer {
         if (isShardClosed) {
             switch (getEndpoint().getConfiguration().getShardClosed()) {
             case ignore:
-                log.warn("The shard {} is in closed state", currentShardIterator);
+                LOG.warn("The shard {} is in closed state", currentShardIterator);
                 break;
             case silent:
                 break;
             case fail:
-                log.info("Shard Iterator reaches CLOSE status:{} {}", getEndpoint().getConfiguration().getStreamName(), getEndpoint().getConfiguration().getShardId());
+                LOG.info("Shard Iterator reaches CLOSE status:{} {}", getEndpoint().getConfiguration().getStreamName(), getEndpoint().getConfiguration().getShardId());
                 throw new ReachedClosedStatusException(getEndpoint().getConfiguration().getStreamName(), getEndpoint().getConfiguration().getShardId());
             default:
                 throw new IllegalArgumentException("Unsupported shard closed strategy");
@@ -85,11 +88,11 @@ public class KinesisConsumer extends ScheduledBatchPollingConsumer {
         while (!exchanges.isEmpty()) {
             final Exchange exchange = ObjectHelper.cast(Exchange.class, exchanges.poll());
 
-            log.trace("Processing exchange [{}] started.", exchange);
+            LOG.trace("Processing exchange [{}] started.", exchange);
             getAsyncProcessor().process(exchange, new AsyncCallback() {
                 @Override
                 public void done(boolean doneSync) {
-                    log.trace("Processing exchange [{}] done.", exchange);
+                    LOG.trace("Processing exchange [{}] done.", exchange);
                 }
             });
             processedExchanges++;
@@ -129,7 +132,7 @@ public class KinesisConsumer extends ScheduledBatchPollingConsumer {
                 shardId = res1.getStreamDescription().getShards().get(0).getShardId();
                 isShardClosed = res1.getStreamDescription().getShards().get(0).getSequenceNumberRange().getEndingSequenceNumber() != null;
             }
-            log.debug("ShardId is: {}", shardId);
+            LOG.debug("ShardId is: {}", shardId);
 
             GetShardIteratorRequest req = new GetShardIteratorRequest().withStreamName(getEndpoint().getConfiguration().getStreamName()).withShardId(shardId)
                 .withShardIteratorType(getEndpoint().getConfiguration().getIteratorType());
@@ -141,7 +144,7 @@ public class KinesisConsumer extends ScheduledBatchPollingConsumer {
             GetShardIteratorResult result = getClient().getShardIterator(req);
             currentShardIterator = result.getShardIterator();
         }
-        log.debug("Shard Iterator is: {}", currentShardIterator);
+        LOG.debug("Shard Iterator is: {}", currentShardIterator);
         return currentShardIterator;
     }
 
diff --git a/components/camel-aws-kms/src/main/java/org/apache/camel/component/aws/kms/KMSProducer.java b/components/camel-aws-kms/src/main/java/org/apache/camel/component/aws/kms/KMSProducer.java
index 4247edc..f09e46f 100644
--- a/components/camel-aws-kms/src/main/java/org/apache/camel/component/aws/kms/KMSProducer.java
+++ b/components/camel-aws-kms/src/main/java/org/apache/camel/component/aws/kms/KMSProducer.java
@@ -36,6 +36,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon KMS Service
@@ -43,6 +45,8 @@ import org.apache.camel.util.URISupport;
  */
 public class KMSProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(KMSProducer.class);
+
     private transient String kmsProducerToString;
 
     public KMSProducer(Endpoint endpoint) {
@@ -114,7 +118,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.listKeys(request);
         } catch (AmazonServiceException ase) {
-            log.trace("List Keys command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Keys command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -131,7 +135,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.createKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -150,7 +154,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.disableKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Disable Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Disable Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -173,7 +177,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.scheduleKeyDeletion(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Schedule Key Deletion command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Schedule Key Deletion command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -192,7 +196,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.describeKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Describe Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -211,7 +215,7 @@ public class KMSProducer extends DefaultProducer {
         try {
             result = kmsClient.enableKey(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Enable Key command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Enable Key command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-lambda/src/main/java/org/apache/camel/component/aws/lambda/LambdaProducer.java b/components/camel-aws-lambda/src/main/java/org/apache/camel/component/aws/lambda/LambdaProducer.java
index 1410c77..c949908 100644
--- a/components/camel-aws-lambda/src/main/java/org/apache/camel/component/aws/lambda/LambdaProducer.java
+++ b/components/camel-aws-lambda/src/main/java/org/apache/camel/component/aws/lambda/LambdaProducer.java
@@ -72,6 +72,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon Web Service Lambda <a
@@ -79,6 +81,8 @@ import org.apache.camel.util.ObjectHelper;
  */
 public class LambdaProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(LambdaProducer.class);
+
     public LambdaProducer(final Endpoint endpoint) {
         super(endpoint);
     }
@@ -150,7 +154,7 @@ public class LambdaProducer extends DefaultProducer {
         try {
             result = lambdaClient.getFunction(new GetFunctionRequest().withFunctionName(getEndpoint().getFunction()));
         } catch (AmazonServiceException ase) {
-            log.trace("getFunction command returned the error code {}", ase.getErrorCode());
+            LOG.trace("getFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -162,7 +166,7 @@ public class LambdaProducer extends DefaultProducer {
         try {
             result = lambdaClient.deleteFunction(new DeleteFunctionRequest().withFunctionName(getEndpoint().getFunction()));
         } catch (AmazonServiceException ase) {
-            log.trace("deleteFunction command returned the error code {}", ase.getErrorCode());
+            LOG.trace("deleteFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -174,7 +178,7 @@ public class LambdaProducer extends DefaultProducer {
         try {
             result = lambdaClient.listFunctions();
         } catch (AmazonServiceException ase) {
-            log.trace("listFunctions command returned the error code {}", ase.getErrorCode());
+            LOG.trace("listFunctions command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -189,7 +193,7 @@ public class LambdaProducer extends DefaultProducer {
                 .withPayload(exchange.getIn().getBody(String.class));
             result = lambdaClient.invoke(request);
         } catch (AmazonServiceException ase) {
-            log.trace("invokeFunction command returned the error code {}", ase.getErrorCode());
+            LOG.trace("invokeFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -324,7 +328,7 @@ public class LambdaProducer extends DefaultProducer {
             result = lambdaClient.createFunction(request);
 
         } catch (AmazonServiceException ase) {
-            log.trace("createFunction command returned the error code {}", ase.getErrorCode());
+            LOG.trace("createFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
 
@@ -389,7 +393,7 @@ public class LambdaProducer extends DefaultProducer {
             result = lambdaClient.updateFunctionCode(request);
 
         } catch (AmazonServiceException ase) {
-            log.trace("updateFunction command returned the error code {}", ase.getErrorCode());
+            LOG.trace("updateFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
 
@@ -421,7 +425,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.createEventSourceMapping(request);
         } catch (AmazonServiceException ase) {
-            log.trace("createEventSourceMapping command returned the error code {}", ase.getErrorCode());
+            LOG.trace("createEventSourceMapping command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -448,7 +452,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.deleteEventSourceMapping(request);
         } catch (AmazonServiceException ase) {
-            log.trace("deleteEventSourceMapping command returned the error code {}", ase.getErrorCode());
+            LOG.trace("deleteEventSourceMapping command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -470,7 +474,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.listEventSourceMappings(request);
         } catch (AmazonServiceException ase) {
-            log.trace("listEventSourceMapping command returned the error code {}", ase.getErrorCode());
+            LOG.trace("listEventSourceMapping command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -489,7 +493,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.listTags(request);
         } catch (AmazonServiceException ase) {
-            log.trace("listTags command returned the error code {}", ase.getErrorCode());
+            LOG.trace("listTags command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -514,7 +518,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.tagResource(request);
         } catch (AmazonServiceException ase) {
-            log.trace("listTags command returned the error code {}", ase.getErrorCode());
+            LOG.trace("tagResource command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -539,7 +543,7 @@ public class LambdaProducer extends DefaultProducer {
             }
             result = lambdaClient.untagResource(request);
         } catch (AmazonServiceException ase) {
-            log.trace("untagResource command returned the error code {}", ase.getErrorCode());
+            LOG.trace("untagResource command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -560,7 +564,7 @@ public class LambdaProducer extends DefaultProducer {
             } 
             result = lambdaClient.publishVersion(request);
         } catch (AmazonServiceException ase) {
-            log.trace("publishVersion command returned the error code {}", ase.getErrorCode());
+            LOG.trace("publishVersion command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -573,7 +577,7 @@ public class LambdaProducer extends DefaultProducer {
             ListVersionsByFunctionRequest request = new ListVersionsByFunctionRequest().withFunctionName(getEndpoint().getFunction());
             result = lambdaClient.listVersionsByFunction(request);
         } catch (AmazonServiceException ase) {
-            log.trace("publishVersion command returned the error code {}", ase.getErrorCode());
+            LOG.trace("listVersionsByFunction command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -597,7 +601,7 @@ public class LambdaProducer extends DefaultProducer {
             } 
             result = lambdaClient.createAlias(request);
         } catch (AmazonServiceException ase) {
-            log.trace("createAlias command returned the error code {}", ase.getErrorCode());
+            LOG.trace("createAlias command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -615,7 +619,7 @@ public class LambdaProducer extends DefaultProducer {
             request.setName(aliasName);
             result = lambdaClient.deleteAlias(request);
         } catch (AmazonServiceException ase) {
-            log.trace("deleteAlias command returned the error code {}", ase.getErrorCode());
+            LOG.trace("deleteAlias command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -633,7 +637,7 @@ public class LambdaProducer extends DefaultProducer {
             request.setName(aliasName);
             result = lambdaClient.getAlias(request);
         } catch (AmazonServiceException ase) {
-            log.trace("getAlias command returned the error code {}", ase.getErrorCode());
+            LOG.trace("getAlias command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -651,7 +655,7 @@ public class LambdaProducer extends DefaultProducer {
             request.withFunctionVersion(version);
             result = lambdaClient.listAliases(request);
         } catch (AmazonServiceException ase) {
-            log.trace("listAliases command returned the error code {}", ase.getErrorCode());
+            LOG.trace("listAliases command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-mq/src/main/java/org/apache/camel/component/aws/mq/MQProducer.java b/components/camel-aws-mq/src/main/java/org/apache/camel/component/aws/mq/MQProducer.java
index bf9bef4..f02cdea 100644
--- a/components/camel-aws-mq/src/main/java/org/apache/camel/component/aws/mq/MQProducer.java
+++ b/components/camel-aws-mq/src/main/java/org/apache/camel/component/aws/mq/MQProducer.java
@@ -42,6 +42,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon MQ Service
@@ -49,6 +51,8 @@ import org.apache.camel.util.URISupport;
  */
 public class MQProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(MQProducer.class);
+
     private transient String mqProducerToString;
 
     public MQProducer(Endpoint endpoint) {
@@ -116,7 +120,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.listBrokers(request);
         } catch (AmazonServiceException ase) {
-            log.trace("List Brokers command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Brokers command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -178,7 +182,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.createBroker(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Broker command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Broker command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -198,7 +202,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.deleteBroker(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Broker command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Broker command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -218,7 +222,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.rebootBroker(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Reboot Broker command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Reboot Broker command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -245,7 +249,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.updateBroker(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Update Broker command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Update Broker command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -265,7 +269,7 @@ public class MQProducer extends DefaultProducer {
         try {
             result = mqClient.describeBroker(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Reboot Broker command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Broker command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-msk/src/main/java/org/apache/camel/component/aws/msk/MSKProducer.java b/components/camel-aws-msk/src/main/java/org/apache/camel/component/aws/msk/MSKProducer.java
index bd8318b7..d1d3f7f 100644
--- a/components/camel-aws-msk/src/main/java/org/apache/camel/component/aws/msk/MSKProducer.java
+++ b/components/camel-aws-msk/src/main/java/org/apache/camel/component/aws/msk/MSKProducer.java
@@ -33,6 +33,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon MSK Service
@@ -40,6 +42,8 @@ import org.apache.camel.util.URISupport;
  */
 public class MSKProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(MSKProducer.class);
+
     private transient String mskProducerToString;
 
     public MSKProducer(Endpoint endpoint) {
@@ -101,7 +105,7 @@ public class MSKProducer extends DefaultProducer {
         try {
             result = mskClient.listClusters(request);
         } catch (AmazonServiceException ase) {
-            log.trace("List Clusters command returned the error code {}", ase.getErrorCode());
+            LOG.trace("List Clusters command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -138,7 +142,7 @@ public class MSKProducer extends DefaultProducer {
         try {
             result = mskClient.createCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Create Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -157,7 +161,7 @@ public class MSKProducer extends DefaultProducer {
         try {
             result = mskClient.deleteCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
@@ -176,7 +180,7 @@ public class MSKProducer extends DefaultProducer {
         try {
             result = mskClient.describeCluster(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Delete Cluster command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Describe Cluster command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Consumer.java b/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Consumer.java
index b1d1b49..9c50779 100644
--- a/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Consumer.java
+++ b/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Consumer.java
@@ -40,6 +40,8 @@ import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.IOHelper;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Consumer of messages from the Amazon Web Service Simple Storage Service
@@ -47,6 +49,8 @@ import org.apache.camel.util.URISupport;
  */
 public class S3Consumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(S3Consumer.class);
+
     private String marker;
     private transient String s3ConsumerToString;
 
@@ -65,12 +69,12 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
         Queue<Exchange> exchanges;
 
         if (fileName != null) {
-            log.trace("Getting object in bucket [{}] with file name [{}]...", bucketName, fileName);
+            LOG.trace("Getting object in bucket [{}] with file name [{}]...", bucketName, fileName);
 
             S3Object s3Object = getAmazonS3Client().getObject(new GetObjectRequest(bucketName, fileName));
             exchanges = createExchanges(s3Object);
         } else {
-            log.trace("Queueing objects in bucket [{}]...", bucketName);
+            LOG.trace("Queueing objects in bucket [{}]...", bucketName);
 
             ListObjectsRequest listObjectsRequest = new ListObjectsRequest();
             listObjectsRequest.setBucketName(bucketName);
@@ -83,20 +87,20 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
             // if there was a marker from previous poll then use that to
             // continue from where we left last time
             if (marker != null) {
-                log.trace("Resuming from marker: {}", marker);
+                LOG.trace("Resuming from marker: {}", marker);
                 listObjectsRequest.setMarker(marker);
             }
 
             ObjectListing listObjects = getAmazonS3Client().listObjects(listObjectsRequest);
             if (listObjects.isTruncated()) {
                 marker = listObjects.getNextMarker();
-                log.trace("Returned list is truncated, so setting next marker: {}", marker);
+                LOG.trace("Returned list is truncated, so setting next marker: {}", marker);
             } else {
                 // no more data so clear marker
                 marker = null;
             }
-            if (log.isTraceEnabled()) {
-                log.trace("Found {} objects in bucket [{}]...", listObjects.getObjectSummaries().size(), bucketName);
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Found {} objects in bucket [{}]...", listObjects.getObjectSummaries().size(), bucketName);
             }
 
             exchanges = createExchanges(listObjects.getObjectSummaries());
@@ -112,8 +116,8 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
     }
 
     protected Queue<Exchange> createExchanges(List<S3ObjectSummary> s3ObjectSummaries) {
-        if (log.isTraceEnabled()) {
-            log.trace("Received {} messages in this poll", s3ObjectSummaries.size());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Received {} messages in this poll", s3ObjectSummaries.size());
         }
 
         Collection<S3Object> s3Objects = new ArrayList<>();
@@ -127,7 +131,7 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
                 answer.add(exchange);
             }
         } catch (Throwable e) {
-            log.warn("Error getting S3Object due: {}", e.getMessage(), e);
+            LOG.warn("Error getting S3Object due: {}", e.getMessage(), e);
             // ensure all previous gathered s3 objects are closed
             // if there was an exception creating the exchanges in this batch
             s3Objects.forEach(IOHelper::close);
@@ -168,11 +172,11 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
                 }
             });
 
-            log.trace("Processing exchange [{}]...", exchange);
+            LOG.trace("Processing exchange [{}]...", exchange);
             getAsyncProcessor().process(exchange, new AsyncCallback() {
                 @Override
                 public void done(boolean doneSync) {
-                    log.trace("Processing exchange [{}] done.", exchange);
+                    LOG.trace("Processing exchange [{}] done.", exchange);
                 }
             });
         }
@@ -191,11 +195,11 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
                 String bucketName = exchange.getIn().getHeader(S3Constants.BUCKET_NAME, String.class);
                 String key = exchange.getIn().getHeader(S3Constants.KEY, String.class);
 
-                log.trace("Deleting object from bucket {} with key {}...", bucketName, key);
+                LOG.trace("Deleting object from bucket {} with key {}...", bucketName, key);
 
                 getAmazonS3Client().deleteObject(bucketName, key);
 
-                log.trace("Deleted object from bucket {} with key {}...", bucketName, key);
+                LOG.trace("Deleted object from bucket {} with key {}...", bucketName, key);
             }
         } catch (AmazonClientException e) {
             getExceptionHandler().handleException("Error occurred during deleting object. This exception is ignored.", exchange, e);
@@ -210,9 +214,9 @@ public class S3Consumer extends ScheduledBatchPollingConsumer {
     protected void processRollback(Exchange exchange) {
         Exception cause = exchange.getException();
         if (cause != null) {
-            log.warn("Exchange failed, so rolling back message status: {}", exchange, cause);
+            LOG.warn("Exchange failed, so rolling back message status: {}", exchange, cause);
         } else {
-            log.warn("Exchange failed, so rolling back message status: {}", exchange);
+            LOG.warn("Exchange failed, so rolling back message status: {}", exchange);
         }
     }
 
diff --git a/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Producer.java b/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Producer.java
index 5ed2efb..b8bbb9e 100644
--- a/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Producer.java
+++ b/components/camel-aws-s3/src/main/java/org/apache/camel/component/aws/s3/S3Producer.java
@@ -232,7 +232,7 @@ public class S3Producer extends DefaultProducer {
         } else {
             is = exchange.getIn().getMandatoryBody(InputStream.class);
             if (objectMetadata.getContentLength() == 0 && ObjectHelper.isEmpty(exchange.getProperty(Exchange.CONTENT_LENGTH))) {
-                log.debug("The content length is not defined. It needs to be determined by reading the data into memory");
+                LOG.debug("The content length is not defined. It needs to be determined by reading the data into memory");
                 baos = determineLengthInputStream(is);
                 objectMetadata.setContentLength(baos.size());
                 is = new ByteArrayInputStream(baos.toByteArray());
diff --git a/components/camel-aws-ses/src/main/java/org/apache/camel/component/aws/ses/SesProducer.java b/components/camel-aws-ses/src/main/java/org/apache/camel/component/aws/ses/SesProducer.java
index 0ce967c..3f5a242 100644
--- a/components/camel-aws-ses/src/main/java/org/apache/camel/component/aws/ses/SesProducer.java
+++ b/components/camel-aws-ses/src/main/java/org/apache/camel/component/aws/ses/SesProducer.java
@@ -34,13 +34,17 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon Simple Email Service
  * <a href="http://aws.amazon.com/ses/">AWS SES</a>
  */
 public class SesProducer extends DefaultProducer {
-    
+
+    private static final Logger LOG = LoggerFactory.getLogger(SesProducer.class);
+
     private transient String sesProducerToString;
     
     public SesProducer(Endpoint endpoint) {
@@ -51,16 +55,16 @@ public class SesProducer extends DefaultProducer {
     public void process(Exchange exchange) throws Exception {
         if (!(exchange.getIn().getBody() instanceof javax.mail.Message)) {
             SendEmailRequest request = createMailRequest(exchange);
-            log.trace("Sending request [{}] from exchange [{}]...", request, exchange);            
+            LOG.trace("Sending request [{}] from exchange [{}]...", request, exchange);
             SendEmailResult result = getEndpoint().getSESClient().sendEmail(request);
-            log.trace("Received result [{}]", result);
+            LOG.trace("Received result [{}]", result);
             Message message = getMessageForResponse(exchange);
             message.setHeader(SesConstants.MESSAGE_ID, result.getMessageId());
         } else {
             SendRawEmailRequest request = createRawMailRequest(exchange);
-            log.trace("Sending request [{}] from exchange [{}]...", request, exchange);            
+            LOG.trace("Sending request [{}] from exchange [{}]...", request, exchange);
             SendRawEmailResult result = getEndpoint().getSESClient().sendRawEmail(request);
-            log.trace("Received result [{}]", result);
+            LOG.trace("Received result [{}]", result);
             Message message = getMessageForResponse(exchange);
             message.setHeader(SesConstants.MESSAGE_ID, result.getMessageId());
         }
@@ -105,7 +109,7 @@ public class SesProducer extends DefaultProducer {
         try {
             content.writeTo(byteOutput);
         } catch (Exception e) {
-            log.error("Cannot write to byte Array");
+            LOG.error("Cannot write to byte Array");
             throw e;
         }
         byte[] messageByteArray = ((ByteArrayOutputStream)byteOutput).toByteArray();
diff --git a/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsEndpoint.java b/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsEndpoint.java
index 4732651..bc75eac 100644
--- a/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsEndpoint.java
+++ b/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsEndpoint.java
@@ -46,6 +46,8 @@ import org.apache.camel.spi.UriParam;
 import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.DefaultEndpoint;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The aws-sns component is used for sending messages to an Amazon Simple Notification Topic.
@@ -54,6 +56,8 @@ import org.apache.camel.util.ObjectHelper;
     producerOnly = true, label = "cloud,mobile,messaging")
 public class SnsEndpoint extends DefaultEndpoint implements HeaderFilterStrategyAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SnsEndpoint.class);
+
     private AmazonSNS snsClient;
 
     @UriPath(description = "Topic name or ARN")
@@ -119,7 +123,7 @@ public class SnsEndpoint extends DefaultEndpoint implements HeaderFilterStrategy
                     }
                 } while (nextToken != null);
             } catch (final AmazonServiceException ase) {
-                log.trace("The list topics operation return the following error code {}", ase.getErrorCode());
+                LOG.trace("The list topics operation return the following error code {}", ase.getErrorCode());
                 throw ase;
             }
         }
@@ -136,26 +140,26 @@ public class SnsEndpoint extends DefaultEndpoint implements HeaderFilterStrategy
                 }
             }
 
-            log.trace("Creating topic [{}] with request [{}]...", configuration.getTopicName(), request);
+            LOG.trace("Creating topic [{}] with request [{}]...", configuration.getTopicName(), request);
 
             CreateTopicResult result = snsClient.createTopic(request);
             configuration.setTopicArn(result.getTopicArn());
 
-            log.trace("Topic created with Amazon resource name: {}", configuration.getTopicArn());
+            LOG.trace("Topic created with Amazon resource name: {}", configuration.getTopicArn());
         }
         
         if (ObjectHelper.isNotEmpty(configuration.getPolicy())) {
-            log.trace("Updating topic [{}] with policy [{}]", configuration.getTopicArn(), configuration.getPolicy());
+            LOG.trace("Updating topic [{}] with policy [{}]", configuration.getTopicArn(), configuration.getPolicy());
             
             snsClient.setTopicAttributes(new SetTopicAttributesRequest(configuration.getTopicArn(), "Policy", configuration.getPolicy()));
             
-            log.trace("Topic policy updated");
+            LOG.trace("Topic policy updated");
         }
         
         if (configuration.isSubscribeSNStoSQS()) {
             if (ObjectHelper.isNotEmpty(configuration.getAmazonSQSClient()) && ObjectHelper.isNotEmpty(configuration.getQueueUrl())) {
                 String subscriptionARN = Topics.subscribeQueue(snsClient, configuration.getAmazonSQSClient(), configuration.getTopicArn(), configuration.getQueueUrl());
-                log.trace("Subscription of SQS Queue to SNS Topic done with Amazon resource name: {}", subscriptionARN);
+                LOG.trace("Subscription of SQS Queue to SNS Topic done with Amazon resource name: {}", subscriptionARN);
             } else {
                 throw new IllegalArgumentException("Using the SubscribeSNStoSQS option require both AmazonSQSClient and Queue URL options");
             }
diff --git a/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsProducer.java b/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsProducer.java
index 99273f1..55e082b 100644
--- a/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsProducer.java
+++ b/components/camel-aws-sns/src/main/java/org/apache/camel/component/aws/sns/SnsProducer.java
@@ -34,6 +34,8 @@ import org.apache.camel.Message;
 import org.apache.camel.spi.HeaderFilterStrategy;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon Web Service Simple Notification Service
@@ -41,6 +43,8 @@ import org.apache.camel.util.URISupport;
  */
 public class SnsProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SnsProducer.class);
+
     private transient String snsProducerToString;
 
     public SnsProducer(Endpoint endpoint) {
@@ -57,11 +61,11 @@ public class SnsProducer extends DefaultProducer {
         request.setMessage(exchange.getIn().getBody(String.class));
         request.setMessageAttributes(this.translateAttributes(exchange.getIn().getHeaders(), exchange));
 
-        log.trace("Sending request [{}] from exchange [{}]...", request, exchange);
+        LOG.trace("Sending request [{}] from exchange [{}]...", request, exchange);
 
         PublishResult result = getEndpoint().getSNSClient().publish(request);
 
-        log.trace("Received result [{}]", result);
+        LOG.trace("Received result [{}]", result);
 
         Message message = getMessageForResponse(exchange);
         message.setHeader(SnsConstants.MESSAGE_ID, result.getMessageId());
@@ -117,7 +121,7 @@ public class SnsProducer extends DefaultProducer {
                     result.put(entry.getKey(), mav);
                 } else {
                     // cannot translate the message header to message attribute value
-                    log.warn("Cannot put the message header key={}, value={} into Sns MessageAttribute", entry.getKey(), entry.getValue());
+                    LOG.warn("Cannot put the message header key={}, value={} into Sns MessageAttribute", entry.getKey(), entry.getValue());
                 }
             }
         }
diff --git a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsConsumer.java b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsConsumer.java
index 3df7fa9..cbf03c0 100644
--- a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsConsumer.java
+++ b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsConsumer.java
@@ -45,6 +45,8 @@ import org.apache.camel.support.ScheduledBatchPollingConsumer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Consumer of messages from the Amazon Web Service Simple Queue Service
@@ -52,6 +54,8 @@ import org.apache.camel.util.URISupport;
  */
 public class SqsConsumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SqsConsumer.class);
+
     private ScheduledExecutorService scheduledExecutor;
     private transient String sqsConsumerToString;
     private Collection<String> attributeNames;
@@ -88,19 +92,19 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
             request.setMessageAttributeNames(messageAttributeNames);
         }
 
-        log.trace("Receiving messages with request [{}]...", request);
+        LOG.trace("Receiving messages with request [{}]...", request);
 
         ReceiveMessageResult messageResult;
         try {
             messageResult = getClient().receiveMessage(request);
         } catch (QueueDoesNotExistException e) {
-            log.info("Queue does not exist....recreating now...");
+            LOG.info("Queue does not exist....recreating now...");
             reConnectToQueue();
             messageResult = getClient().receiveMessage(request);
         }
 
-        if (log.isTraceEnabled()) {
-            log.trace("Received {} messages", messageResult.getMessages().size());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Received {} messages", messageResult.getMessages().size());
         }
 
         Queue<Exchange> exchanges = createExchanges(messageResult.getMessages());
@@ -113,21 +117,21 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
                 getEndpoint().createQueue(getClient());
             }
         } catch (QueueDeletedRecentlyException qdr) {
-            log.debug("Queue recently deleted, will retry in 30 seconds.");
+            LOG.debug("Queue recently deleted, will retry in 30 seconds.");
             try {
                 Thread.sleep(30000);
                 getEndpoint().createQueue(getClient());
             } catch (Exception e) {
-                log.warn("failed to retry queue connection.", e);
+                LOG.warn("failed to retry queue connection.", e);
             }
         } catch (Exception e) {
-            log.warn("Could not connect to queue in amazon.", e);
+            LOG.warn("Could not connect to queue in amazon.", e);
         }
     }
 
     protected Queue<Exchange> createExchanges(List<Message> messages) {
-        if (log.isTraceEnabled()) {
-            log.trace("Received {} messages in this poll", messages.size());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Received {} messages in this poll", messages.size());
         }
 
         Queue<Exchange> answer = new LinkedList<>();
@@ -160,8 +164,8 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
                 int delay = visibilityTimeout.intValue() / 2;
                 int period = visibilityTimeout.intValue();
                 int repeatSeconds = Double.valueOf(visibilityTimeout.doubleValue() * 1.5).intValue();
-                if (log.isDebugEnabled()) {
-                    log.debug("Scheduled TimeoutExtender task to start after {} delay, and run with {}/{} period/repeat (seconds), to extend exchangeId: {}", delay, period,
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Scheduled TimeoutExtender task to start after {} delay, and run with {}/{} period/repeat (seconds), to extend exchangeId: {}", delay, period,
                               repeatSeconds, exchange.getExchangeId());
                 }
                 final ScheduledFuture<?> scheduledFuture = this.scheduledExecutor.scheduleAtFixedRate(new TimeoutExtender(exchange, repeatSeconds), delay, period,
@@ -179,7 +183,7 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
 
                     private void cancelExtender(Exchange exchange) {
                         // cancel task as we are done
-                        log.trace("Processing done so cancelling TimeoutExtender task for exchangeId: {}", exchange.getExchangeId());
+                        LOG.trace("Processing done so cancelling TimeoutExtender task for exchangeId: {}", exchange.getExchangeId());
                         scheduledFuture.cancel(true);
                     }
                 });
@@ -201,8 +205,8 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
                 }
             });
 
-            log.trace("Processing exchange [{}]...", exchange);
-            getAsyncProcessor().process(exchange, doneSync -> log.trace("Processing exchange [{}] done.", exchange));
+            LOG.trace("Processing exchange [{}]...", exchange);
+            getAsyncProcessor().process(exchange, doneSync -> LOG.trace("Processing exchange [{}] done.", exchange));
         }
 
         return total;
@@ -220,11 +224,11 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
                 String receiptHandle = exchange.getIn().getHeader(SqsConstants.RECEIPT_HANDLE, String.class);
                 DeleteMessageRequest deleteRequest = new DeleteMessageRequest(getQueueUrl(), receiptHandle);
 
-                log.trace("Deleting message with receipt handle {}...", receiptHandle);
+                LOG.trace("Deleting message with receipt handle {}...", receiptHandle);
 
                 getClient().deleteMessage(deleteRequest);
 
-                log.trace("Deleted message with receipt handle {}...", receiptHandle);
+                LOG.trace("Deleted message with receipt handle {}...", receiptHandle);
             }
         } catch (AmazonClientException e) {
             getExceptionHandler().handleException("Error occurred during deleting message. This exception is ignored.", exchange, e);
@@ -314,15 +318,15 @@ public class SqsConsumer extends ScheduledBatchPollingConsumer {
                                                                                         repeatSeconds);
 
             try {
-                log.trace("Extending visibility window by {} seconds for exchange {}", this.repeatSeconds, this.exchange);
+                LOG.trace("Extending visibility window by {} seconds for exchange {}", this.repeatSeconds, this.exchange);
                 getEndpoint().getClient().changeMessageVisibility(request);
-                log.debug("Extended visibility window by {} seconds for exchange {}", this.repeatSeconds, this.exchange);
+                LOG.debug("Extended visibility window by {} seconds for exchange {}", this.repeatSeconds, this.exchange);
             } catch (ReceiptHandleIsInvalidException e) {
                 // Ignore.
             } catch (MessageNotInflightException e) {
                 // Ignore.
             } catch (Exception e) {
-                log.warn("Extending visibility window failed for exchange " + exchange + ". Will not attempt to extend visibility further. This exception will be ignored.", e);
+                LOG.warn("Extending visibility window failed for exchange " + exchange + ". Will not attempt to extend visibility further. This exception will be ignored.", e);
             }
         }
     }
diff --git a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsEndpoint.java b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsEndpoint.java
index 2dcfd2f..1d95af3 100644
--- a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsEndpoint.java
+++ b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsEndpoint.java
@@ -53,6 +53,8 @@ import org.apache.camel.support.DefaultScheduledPollConsumerScheduler;
 import org.apache.camel.support.ScheduledPollEndpoint;
 import org.apache.camel.util.FileUtil;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The aws-sqs component is used for sending and receiving messages to Amazon's
@@ -61,6 +63,8 @@ import org.apache.camel.util.ObjectHelper;
 @UriEndpoint(firstVersion = "2.6.0", scheme = "aws-sqs", title = "AWS Simple Queue Service", syntax = "aws-sqs:queueNameOrArn", label = "cloud,messaging")
 public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterStrategyAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SqsEndpoint.class);
+
     private AmazonSQS client;
     private String queueUrl;
 
@@ -157,7 +161,7 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
                 for (String url : listQueuesResult.getQueueUrls()) {
                     if (url.endsWith("/" + configuration.getQueueName())) {
                         queueUrl = url;
-                        log.trace("Queue available at '{}'.", queueUrl);
+                        LOG.trace("Queue available at '{}'.", queueUrl);
                         break;
                     }
                 }
@@ -167,13 +171,13 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
         if (queueUrl == null && configuration.isAutoCreateQueue()) {
             createQueue(client);
         } else {
-            log.debug("Using Amazon SQS queue url: {}", queueUrl);
+            LOG.debug("Using Amazon SQS queue url: {}", queueUrl);
             updateQueueAttributes(client);
         }
     }
 
     protected void createQueue(AmazonSQS client) {
-        log.trace("Queue '{}' doesn't exist. Will create it...", configuration.getQueueName());
+        LOG.trace("Queue '{}' doesn't exist. Will create it...", configuration.getQueueName());
 
         // creates a new queue, or returns the URL of an existing one
         CreateQueueRequest request = new CreateQueueRequest(configuration.getQueueName());
@@ -211,12 +215,12 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
                 request.getAttributes().put(QueueAttributeName.KmsDataKeyReusePeriodSeconds.name(), String.valueOf(getConfiguration().getKmsDataKeyReusePeriodSeconds()));
             }
         }
-        log.trace("Creating queue [{}] with request [{}]...", configuration.getQueueName(), request);
+        LOG.trace("Creating queue [{}] with request [{}]...", configuration.getQueueName(), request);
 
         CreateQueueResult queueResult = client.createQueue(request);
         queueUrl = queueResult.getQueueUrl();
 
-        log.trace("Queue created and available at: {}", queueUrl);
+        LOG.trace("Queue created and available at: {}", queueUrl);
     }
 
     private void updateQueueAttributes(AmazonSQS client) {
@@ -252,9 +256,9 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
             }
         }
         if (!request.getAttributes().isEmpty()) {
-            log.trace("Updating queue '{}' with the provided queue attributes...", configuration.getQueueName());
+            LOG.trace("Updating queue '{}' with the provided queue attributes...", configuration.getQueueName());
             client.setQueueAttributes(request);
-            log.trace("Queue '{}' updated and available at {}'", configuration.getQueueName(), queueUrl);
+            LOG.trace("Queue '{}' updated and available at {}'", configuration.getQueueName(), queueUrl);
         }
     }
 
@@ -339,7 +343,7 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
         final String protocol = configuration.getProtocol(); 
 
         if (protocol.equals("http")) {
-            log.trace("Configuring AWS-SQS for HTTP protocol");
+            LOG.trace("Configuring AWS-SQS for HTTP protocol");
             if (isClientConfigFound) {
                 clientConfiguration = clientConfiguration.withProtocol(Protocol.HTTP);
             } else {
@@ -367,7 +371,7 @@ public class SqsEndpoint extends ScheduledPollEndpoint implements HeaderFilterSt
         final String host = getFullyQualifiedAWSHost();
         final String region = Regions.valueOf(configuration.getRegion()).getName();
 
-        log.debug("Creating endpoint for host {} on region {}", host, region);
+        LOG.debug("Creating endpoint for host {} on region {}", host, region);
         clientBuilder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(host, region));
 
         client = clientBuilder.build();
diff --git a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsProducer.java b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsProducer.java
index 71c09b8..bd5fb94 100644
--- a/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsProducer.java
+++ b/components/camel-aws-sqs/src/main/java/org/apache/camel/component/aws/sqs/SqsProducer.java
@@ -43,6 +43,8 @@ import org.apache.camel.spi.HeaderFilterStrategy;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon Web Service Simple Queue
@@ -50,6 +52,8 @@ import org.apache.camel.util.URISupport;
  */
 public class SqsProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SqsProducer.class);
+
     private transient String sqsProducerToString;
 
     public SqsProducer(SqsEndpoint endpoint) throws NoFactoryAvailableException {
@@ -88,11 +92,11 @@ public class SqsProducer extends DefaultProducer {
         addDelay(request, exchange);
         configureFifoAttributes(request, exchange);
 
-        log.trace("Sending request [{}] from exchange [{}]...", request, exchange);
+        LOG.trace("Sending request [{}] from exchange [{}]...", request, exchange);
 
         SendMessageResult result = getClient().sendMessage(request);
 
-        log.trace("Received result [{}]", result);
+        LOG.trace("Received result [{}]", result);
 
         Message message = getMessageForResponse(exchange);
         message.setHeader(SqsConstants.MESSAGE_ID, result.getMessageId());
@@ -181,13 +185,13 @@ public class SqsProducer extends DefaultProducer {
         Integer headerValue = exchange.getIn().getHeader(SqsConstants.DELAY_HEADER, Integer.class);
         Integer delayValue;
         if (headerValue == null) {
-            log.trace("Using the config delay");
+            LOG.trace("Using the config delay");
             delayValue = getEndpoint().getConfiguration().getDelaySeconds();
         } else {
-            log.trace("Using the header delay");
+            LOG.trace("Using the header delay");
             delayValue = headerValue;
         }
-        log.trace("found delay: {}", delayValue);
+        LOG.trace("found delay: {}", delayValue);
         request.setDelaySeconds(delayValue == null ? Integer.valueOf(0) : delayValue);
     }
 
@@ -195,13 +199,13 @@ public class SqsProducer extends DefaultProducer {
         Integer headerValue = exchange.getIn().getHeader(SqsConstants.DELAY_HEADER, Integer.class);
         Integer delayValue;
         if (headerValue == null) {
-            log.trace("Using the config delay");
+            LOG.trace("Using the config delay");
             delayValue = getEndpoint().getConfiguration().getDelaySeconds();
         } else {
-            log.trace("Using the header delay");
+            LOG.trace("Using the header delay");
             delayValue = headerValue;
         }
-        log.trace("found delay: {}", delayValue);
+        LOG.trace("found delay: {}", delayValue);
         request.setDelaySeconds(delayValue == null ? Integer.valueOf(0) : delayValue);
     }
 
@@ -282,7 +286,7 @@ public class SqsProducer extends DefaultProducer {
                 } else {
                     // cannot translate the message header to message attribute
                     // value
-                    log.warn("Cannot put the message header key={}, value={} into Sqs MessageAttribute", entry.getKey(), entry.getValue());
+                    LOG.warn("Cannot put the message header key={}, value={} into Sqs MessageAttribute", entry.getKey(), entry.getValue());
                 }
             }
         }
diff --git a/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFActivityProducer.java b/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFActivityProducer.java
index e9c34d8..c0bb11c 100644
--- a/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFActivityProducer.java
+++ b/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFActivityProducer.java
@@ -23,7 +23,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SWFActivityProducer extends DefaultProducer {
-    private static final transient Logger LOGGER = LoggerFactory.getLogger(SWFActivityProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SWFActivityProducer.class);
     private final CamelSWFActivityClient camelSWFClient;
     private SWFEndpoint endpoint;
     private SWFConfiguration configuration;
@@ -41,7 +41,7 @@ public class SWFActivityProducer extends DefaultProducer {
     public void process(Exchange exchange) throws Exception {
         String eventName = getEventName(exchange);
         String version = getVersion(exchange);
-        LOGGER.debug("scheduleActivity : " + eventName + " : " + version);
+        LOG.debug("scheduleActivity : " + eventName + " : " + version);
 
         Object result = camelSWFClient.scheduleActivity(eventName, version, exchange.getIn().getBody());
         endpoint.setResult(exchange, result);
diff --git a/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFWorkflowProducer.java b/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFWorkflowProducer.java
index e01ada0..f841f1a 100644
--- a/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFWorkflowProducer.java
+++ b/components/camel-aws-swf/src/main/java/org/apache/camel/component/aws/swf/SWFWorkflowProducer.java
@@ -27,7 +27,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SWFWorkflowProducer extends DefaultProducer {
-    private static final transient Logger LOGGER = LoggerFactory.getLogger(SWFWorkflowProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SWFWorkflowProducer.class);
     private final CamelSWFWorkflowClient camelSWFClient;
     private SWFEndpoint endpoint;
     private SWFConfiguration configuration;
@@ -43,7 +43,7 @@ public class SWFWorkflowProducer extends DefaultProducer {
 
     @Override
     public void process(Exchange exchange) throws Exception {
-        LOGGER.debug("processing workflow task " + exchange);
+        LOG.debug("processing workflow task " + exchange);
 
         try {
 
diff --git a/components/camel-aws-translate/src/main/java/org/apache/camel/component/aws/translate/TranslateProducer.java b/components/camel-aws-translate/src/main/java/org/apache/camel/component/aws/translate/TranslateProducer.java
index aff9644..bc10c8b 100644
--- a/components/camel-aws-translate/src/main/java/org/apache/camel/component/aws/translate/TranslateProducer.java
+++ b/components/camel-aws-translate/src/main/java/org/apache/camel/component/aws/translate/TranslateProducer.java
@@ -28,6 +28,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Amazon Translate Service
@@ -35,6 +37,8 @@ import org.apache.camel.util.URISupport;
  */
 public class TranslateProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(TranslateProducer.class);
+
     private transient String translateProducerToString;
 
     public TranslateProducer(Endpoint endpoint) {
@@ -115,7 +119,7 @@ public class TranslateProducer extends DefaultProducer {
         try {
             result = translateClient.translateText(request);
         } catch (AmazonServiceException ase) {
-            log.trace("Translate Text command returned the error code {}", ase.getErrorCode());
+            LOG.trace("Translate Text command returned the error code {}", ase.getErrorCode());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/XRayTracer.java b/components/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/XRayTracer.java
index 9b07b73..541dda9 100644
--- a/components/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/XRayTracer.java
+++ b/components/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/XRayTracer.java
@@ -77,6 +77,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
     // Note that the Entity itself is not serializable, so don't share this object among different VMs!
     public static final String XRAY_TRACE_ENTITY = "Camel-AWS-XRay-Trace-Entity";
 
+    private static final Logger LOG = LoggerFactory.getLogger(XRayTracer.class);
 
     private static Map<String, SegmentDecorator> decorators = new HashMap<>();
 
@@ -128,13 +129,13 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
         }
 
         if (null == tracingStrategy) {
-            log.info("No tracing strategy available. Defaulting to no-op strategy");
+            LOG.info("No tracing strategy available. Defaulting to no-op strategy");
             tracingStrategy = new NoopTracingStrategy();
         }
 
         camelContext.adapt(ExtendedCamelContext.class).addInterceptStrategy(tracingStrategy);
 
-        log.debug("Starting XRay tracer");
+        LOG.debug("Starting XRay tracer");
     }
 
     @Override
@@ -144,7 +145,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
         ServiceHelper.stopAndShutdownService(eventNotifier);
 
         camelContext.getRoutePolicyFactories().remove(this);
-        log.debug("XRay tracer stopped");
+        LOG.debug("XRay tracer stopped");
     }
 
     /**
@@ -155,7 +156,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
     public void init(CamelContext camelContext) {
         if (!camelContext.hasService(this)) {
             try {
-                log.debug("Initializing XRay tracer");
+                LOG.debug("Initializing XRay tracer");
                 // start this service eager so we init before Camel is starting up
                 camelContext.addService(this, true, true);
             } catch (Exception e) {
@@ -218,7 +219,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
         if (!excludePatterns.isEmpty()) {
             for (String pattern : excludePatterns) {
                 if (pattern.equals(routeId)) {
-                    log.debug("Ignoring route with ID {}", routeId);
+                    LOG.debug("Ignoring route with ID {}", routeId);
                     return true;
                 }
             }
@@ -266,7 +267,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
 
             if (event instanceof ExchangeSendingEvent) {
                 ExchangeSendingEvent ese = (ExchangeSendingEvent) event;
-                log.trace("-> {} - target: {} (routeId: {})",
+                LOG.trace("-> {} - target: {} (routeId: {})",
                         event.getClass().getSimpleName(), ese.getEndpoint(),
                         ese.getExchange().getFromRouteId());
 
@@ -288,21 +289,21 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                     try {
                         Subsegment subsegment = AWSXRay.beginSubsegment(name);
                         sd.pre(subsegment, ese.getExchange(), ese.getEndpoint());
-                        log.trace("Creating new subsegment with ID {} and name {} (parent {}, references: {})",
+                        LOG.trace("Creating new subsegment with ID {} and name {} (parent {}, references: {})",
                                 subsegment.getId(), subsegment.getName(),
                                 subsegment.getParentSegment().getId(), subsegment.getParentSegment().getReferenceCount());
                         ese.getExchange().setProperty(CURRENT_SEGMENT, subsegment);
                     } catch (AlreadyEmittedException aeEx) {
-                        log.warn("Ignoring starting of subsegment " + name + " as its parent segment"
+                        LOG.warn("Ignoring starting of subsegment " + name + " as its parent segment"
                                 + " was already emitted to AWS.");
                     }
                 } else {
-                    log.trace("Ignoring creation of XRay subsegment as no segment exists in the current thread");
+                    LOG.trace("Ignoring creation of XRay subsegment as no segment exists in the current thread");
                 }
 
             } else if (event instanceof ExchangeSentEvent) {
                 ExchangeSentEvent ese = (ExchangeSentEvent) event;
-                log.trace("-> {} - target: {} (routeId: {})",
+                LOG.trace("-> {} - target: {} (routeId: {})",
                         event.getClass().getSimpleName(), ese.getEndpoint(), ese.getExchange().getFromRouteId());
 
                 Entity entity = getTraceEntityFromExchange(ese.getExchange());
@@ -313,17 +314,17 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                         Subsegment subsegment = (Subsegment) entity;
                         sd.post(subsegment, ese.getExchange(), ese.getEndpoint());
                         subsegment.close();
-                        log.trace("Closing down subsegment with ID {} and name {}",
+                        LOG.trace("Closing down subsegment with ID {} and name {}",
                                 subsegment.getId(), subsegment.getName());
-                        log.trace("Setting trace entity for exchange {} to {}", ese.getExchange(), subsegment.getParent());
+                        LOG.trace("Setting trace entity for exchange {} to {}", ese.getExchange(), subsegment.getParent());
                         ese.getExchange().setProperty(CURRENT_SEGMENT, subsegment.getParent());
                     } catch (AlreadyEmittedException aeEx) {
-                        log.warn("Ignoring close of subsegment " + entity.getName()
+                        LOG.warn("Ignoring close of subsegment " + entity.getName()
                                 + " as its parent segment was already emitted to AWS");
                     }
                 }
             } else {
-                log.trace("Received event {} from source {}", event, event.getSource());
+                LOG.trace("Received event {} from source {}", event, event.getSource());
             }
         }
 
@@ -370,7 +371,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                 return;
             }
 
-            log.trace("=> RoutePolicy-Begin: Route: {} - RouteId: {}", routeId, route.getId());
+            LOG.trace("=> RoutePolicy-Begin: Route: {} - RouteId: {}", routeId, route.getId());
 
             Entity entity = getTraceEntityFromExchange(exchange);
             boolean createSegment = entity == null || !Objects.equals(entity.getName(), routeId);
@@ -391,19 +392,19 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                 segment.setParent(entity);
                 segment.setTraceId(traceID);
                 sd.pre(segment, exchange, route.getEndpoint());
-                log.trace("Created new XRay segment {} with name {}", segment.getId(), segment.getName());
+                LOG.trace("Created new XRay segment {} with name {}", segment.getId(), segment.getName());
                 exchange.setProperty(CURRENT_SEGMENT, segment);
             } else {
                 String segmentName = entity.getId();
                 try {
                     Subsegment subsegment = AWSXRay.beginSubsegment(route.getId());
                     sd.pre(subsegment, exchange, route.getEndpoint());
-                    log.trace("Creating new subsegment with ID {} and name {} (parent {}, references: {})",
+                    LOG.trace("Creating new subsegment with ID {} and name {} (parent {}, references: {})",
                             subsegment.getId(), subsegment.getName(),
                             subsegment.getParentSegment().getId(), subsegment.getParentSegment().getReferenceCount());
                     exchange.setProperty(CURRENT_SEGMENT, subsegment);
                 } catch (AlreadyEmittedException aeEx) {
-                    log.warn("Ignoring opening of subsegment " + route.getId() + " as its parent segment "
+                    LOG.warn("Ignoring opening of subsegment " + route.getId() + " as its parent segment "
                             + segmentName + " was already emitted before.");
                 }
             }
@@ -416,7 +417,7 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                 return;
             }
 
-            log.trace("=> RoutePolicy-Done: Route: {} - RouteId: {}", routeId, route.getId());
+            LOG.trace("=> RoutePolicy-Done: Route: {} - RouteId: {}", routeId, route.getId());
 
             Entity entity = getTraceEntityFromExchange(exchange);
             AWSXRay.setTraceEntity(entity);
@@ -424,14 +425,14 @@ public class XRayTracer extends ServiceSupport implements RoutePolicyFactory, St
                 SegmentDecorator sd = getSegmentDecorator(route.getEndpoint());
                 sd.post(entity, exchange, route.getEndpoint());
                 entity.close();
-                log.trace("Closing down (sub)segment {} with name {} (parent {}, references: {})",
+                LOG.trace("Closing down (sub)segment {} with name {} (parent {}, references: {})",
                         entity.getId(), entity.getName(),
                         entity.getParentSegment().getId(), entity.getParentSegment().getReferenceCount());
                 exchange.setProperty(CURRENT_SEGMENT, entity.getParent());
             } catch (AlreadyEmittedException aeEx) {
-                log.warn("Ignoring closing of (sub)segment {} as the segment was already emitted.", route.getId());
+                LOG.warn("Ignoring closing of (sub)segment {} as the segment was already emitted.", route.getId());
             } catch (Exception e) {
-                log.warn("Error closing entity");
+                LOG.warn("Error closing entity", e);
             } finally {
                 AWSXRay.setTraceEntity(null);
             }
diff --git a/components/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2Producer.java b/components/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2Producer.java
index 2f3e58c..1db5875 100644
--- a/components/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2Producer.java
+++ b/components/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2Producer.java
@@ -24,6 +24,8 @@ import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.awscore.exception.AwsServiceException;
 import software.amazon.awssdk.services.translate.TranslateClient;
 import software.amazon.awssdk.services.translate.model.TranslateTextRequest;
@@ -36,6 +38,7 @@ import software.amazon.awssdk.services.translate.model.TranslateTextResponse;
  */
 public class Translate2Producer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(Translate2Producer.class);
     private transient String translateProducerToString;
 
     public Translate2Producer(Endpoint endpoint) {
@@ -116,7 +119,7 @@ public class Translate2Producer extends DefaultProducer {
         try {
             result = translateClient.translateText(request.build());
         } catch (AwsServiceException ase) {
-            log.trace("Translate Text command returned the error code {}", ase.getMessage());
+            LOG.trace("Translate Text command returned the error code {}", ase.getMessage());
             throw ase;
         }
         Message message = getMessageForResponse(exchange);
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceConsumer.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceConsumer.java
index 1fa97b2..882a85a 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceConsumer.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceConsumer.java
@@ -37,7 +37,6 @@ public class BlobServiceConsumer extends ScheduledPollConsumer {
     protected int poll() throws Exception {
         Exchange exchange = super.getEndpoint().createExchange();
         try {
-            log.trace("Getting the blob content");
             getBlob(exchange);
             super.getAsyncProcessor().process(exchange);
             return 1;
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceEndpoint.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceEndpoint.java
index ca92a93..10d2747 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceEndpoint.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceEndpoint.java
@@ -49,7 +49,6 @@ public class BlobServiceEndpoint extends DefaultEndpoint {
 
     @Override
     public Consumer createConsumer(Processor processor) throws Exception {
-        log.trace("Creating a consumer");
         if (getConfiguration().getBlobName() == null) {
             throw new IllegalArgumentException("Blob name must be specified.");
         }
@@ -60,7 +59,6 @@ public class BlobServiceEndpoint extends DefaultEndpoint {
 
     @Override
     public Producer createProducer() throws Exception {
-        log.trace("Creating a producer");
         if (getConfiguration().getBlobName() == null
             && getConfiguration().getOperation() != null 
             && BlobServiceOperations.listBlobs != configuration.getOperation()) {
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceProducer.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceProducer.java
index dc88748..10ba0f3 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceProducer.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/blob/BlobServiceProducer.java
@@ -46,12 +46,16 @@ import org.apache.camel.component.azure.common.ExchangeUtil;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Azure Storage Blob Service
  */
 public class BlobServiceProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BlobServiceProducer.class);
+
     public BlobServiceProducer(final Endpoint endpoint) {
         super(endpoint);
     }
@@ -114,7 +118,7 @@ public class BlobServiceProducer extends DefaultProducer {
     private void listBlobs(Exchange exchange) throws Exception {
         CloudBlobContainer client = BlobServiceUtil.createBlobContainerClient(getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
-        log.trace("Getting the blob list from the container [{}] from exchange [{}]...",
+        LOG.trace("Getting the blob list from the container [{}] from exchange [{}]...",
                   getConfiguration().getContainerName(), exchange);
         BlobServiceConfiguration cfg = getConfiguration();
         EnumSet<BlobListingDetails> details = null;
@@ -139,7 +143,7 @@ public class BlobServiceProducer extends DefaultProducer {
         
         InputStream inputStream = getInputStreamFromExchange(exchange);
         
-        log.trace("Putting a block blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Putting a block blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         try {
             client.upload(inputStream, -1,
                           opts.getAccessCond(), opts.getRequestOpts(), opts.getOpContext());
@@ -165,7 +169,7 @@ public class BlobServiceProducer extends DefaultProducer {
         configureCloudBlobForWrite(client);
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
         
-        log.trace("Putting a blob [{}] from blocks from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Putting a blob [{}] from blocks from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         List<BlockEntry> blockEntries = new LinkedList<>();
         for (BlobBlock blobBlock : blobBlocks) {
             blockEntries.add(blobBlock.getBlockEntry());
@@ -196,7 +200,7 @@ public class BlobServiceProducer extends DefaultProducer {
         CloudBlockBlob client = BlobServiceUtil.createBlockBlobClient(getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
         
-        log.trace("Putting a blob [{}] block list from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Putting a blob [{}] block list from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         client.commitBlockList(blockEntries, 
                                opts.getAccessCond(), opts.getRequestOpts(), opts.getOpContext());
     }
@@ -224,7 +228,7 @@ public class BlobServiceProducer extends DefaultProducer {
     private void getBlobBlockList(Exchange exchange) throws Exception {
         CloudBlockBlob client = BlobServiceUtil.createBlockBlobClient(getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
-        log.trace("Getting the blob block list [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Getting the blob block list [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         BlockListingFilter filter = exchange.getIn().getBody(BlockListingFilter.class);
         if (filter == null) {
             filter = BlockListingFilter.COMMITTED;
@@ -251,7 +255,7 @@ public class BlobServiceProducer extends DefaultProducer {
     
     private void doCreateAppendBlob(CloudAppendBlob client, BlobServiceRequestOptions opts, Exchange exchange) 
         throws Exception {
-        log.trace("Creating an append blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Creating an append blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         try {
             client.createOrReplace(opts.getAccessCond(), opts.getRequestOpts(), opts.getOpContext());
         } catch (StorageException ex) {
@@ -305,7 +309,7 @@ public class BlobServiceProducer extends DefaultProducer {
     
     private void doCreatePageBlob(CloudPageBlob client, BlobServiceRequestOptions opts, Exchange exchange) 
         throws Exception {
-        log.trace("Creating a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Creating a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         Long pageSize = getPageBlobSize(exchange);
         try {
             client.create(pageSize,
@@ -321,7 +325,7 @@ public class BlobServiceProducer extends DefaultProducer {
     }
     
     private void uploadPageBlob(Exchange exchange) throws Exception {
-        log.trace("Updating a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Updating a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         
         CloudPageBlob client = BlobServiceUtil.createPageBlobClient(getConfiguration());
         configureCloudBlobForWrite(client);
@@ -342,7 +346,7 @@ public class BlobServiceProducer extends DefaultProducer {
     }
     
     private void resizePageBlob(Exchange exchange) throws Exception {
-        log.trace("Resizing a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Resizing a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         
         CloudPageBlob client = BlobServiceUtil.createPageBlobClient(getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
@@ -351,7 +355,7 @@ public class BlobServiceProducer extends DefaultProducer {
     }
     
     private void clearPageBlob(Exchange exchange) throws Exception {
-        log.trace("Clearing a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Clearing a page blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
                 
         CloudPageBlob client = BlobServiceUtil.createPageBlobClient(getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
@@ -396,7 +400,7 @@ public class BlobServiceProducer extends DefaultProducer {
         CloudPageBlob client = BlobServiceUtil.createPageBlobClient(getConfiguration());
         BlobServiceUtil.configureCloudBlobForRead(client, getConfiguration());
         BlobServiceRequestOptions opts = BlobServiceUtil.getRequestOptions(exchange);
-        log.trace("Getting the page blob ranges [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Getting the page blob ranges [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         List<PageRange> ranges = 
             client.downloadPageRanges(opts.getAccessCond(), opts.getRequestOpts(), opts.getOpContext());
         ExchangeUtil.getMessageForResponse(exchange).setBody(ranges);
@@ -417,7 +421,7 @@ public class BlobServiceProducer extends DefaultProducer {
 
     
     private void doDeleteBlock(CloudBlob client, Exchange exchange) throws Exception {
-        log.trace("Deleting a blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
+        LOG.trace("Deleting a blob [{}] from exchange [{}]...", getConfiguration().getBlobName(), exchange);
         client.delete();
     }
 
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceConsumer.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceConsumer.java
index e26ca47..de55bed 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceConsumer.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceConsumer.java
@@ -35,7 +35,6 @@ public class QueueServiceConsumer extends ScheduledPollConsumer {
     protected int poll() throws Exception {
         Exchange exchange = super.getEndpoint().createExchange();
         try {
-            log.trace("Retrieving a message");
             retrieveMessage(exchange);
             super.getAsyncProcessor().process(exchange);
             return 1;
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceEndpoint.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceEndpoint.java
index d527547..4116093 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceEndpoint.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceEndpoint.java
@@ -49,7 +49,6 @@ public class QueueServiceEndpoint extends DefaultEndpoint {
 
     @Override
     public Consumer createConsumer(Processor processor) throws Exception {
-        log.trace("Creating a consumer");
         QueueServiceConsumer consumer = new QueueServiceConsumer(this, processor);
         configureConsumer(consumer);
         return consumer;
@@ -57,7 +56,6 @@ public class QueueServiceEndpoint extends DefaultEndpoint {
 
     @Override
     public Producer createProducer() throws Exception {
-        log.trace("Creating a producer");
         return new QueueServiceProducer(this);
     }
 
diff --git a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceProducer.java b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceProducer.java
index d24deac..6d7d8eb 100644
--- a/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceProducer.java
+++ b/components/camel-azure/src/main/java/org/apache/camel/component/azure/queue/QueueServiceProducer.java
@@ -29,12 +29,16 @@ import org.apache.camel.component.azure.common.ExchangeUtil;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer which sends messages to the Azure Storage Queue Service
  */
 public class QueueServiceProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(QueueServiceProducer.class);
+
     public QueueServiceProducer(final Endpoint endpoint) {
         super(endpoint);
     }
@@ -97,7 +101,7 @@ public class QueueServiceProducer extends DefaultProducer {
     }
     
     private void doCreateQueue(CloudQueue client, QueueServiceRequestOptions opts, Exchange exchange) throws Exception {
-        log.trace("Creating the queue [{}] from exchange [{}]...",
+        LOG.trace("Creating the queue [{}] from exchange [{}]...",
                   getConfiguration().getQueueName(), exchange);
         client.createIfNotExists(opts.getRequestOpts(), opts.getOpContext());
         ExchangeUtil.getMessageForResponse(exchange)
@@ -105,7 +109,7 @@ public class QueueServiceProducer extends DefaultProducer {
     }
     
     private void deleteQueue(Exchange exchange) throws Exception {
-        log.trace("Deleting the queue [{}] from exchange [{}]...",
+        LOG.trace("Deleting the queue [{}] from exchange [{}]...",
                   getConfiguration().getQueueName(), exchange);
         CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
         QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
@@ -113,7 +117,7 @@ public class QueueServiceProducer extends DefaultProducer {
     }
     
     private void addMessage(Exchange exchange) throws Exception {
-        log.trace("Putting the message into the queue [{}] from exchange [{}]...",
+        LOG.trace("Putting the message into the queue [{}] from exchange [{}]...",
                   getConfiguration().getQueueName(), exchange);
         CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
         QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
@@ -135,7 +139,7 @@ public class QueueServiceProducer extends DefaultProducer {
         QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
         
         CloudQueueMessage message = getCloudQueueMessage(exchange);
-        log.trace("Updating the message in the queue [{}] from exchange [{}]...",
+        LOG.trace("Updating the message in the queue [{}] from exchange [{}]...",
                   getConfiguration().getQueueName(), exchange);
         
         EnumSet<MessageUpdateFields> fields = null;
@@ -154,7 +158,7 @@ public class QueueServiceProducer extends DefaultProducer {
     }
     
     private void deleteMessage(Exchange exchange) throws Exception {
-        log.trace("Deleting the message from the queue [{}] from exchange [{}]...",
+        LOG.trace("Deleting the message from the queue [{}] from exchange [{}]...",
                   getConfiguration().getQueueName(), exchange);
         CloudQueue client = QueueServiceUtil.createQueueClient(getConfiguration());
         QueueServiceRequestOptions opts = QueueServiceUtil.getRequestOptions(exchange);
diff --git a/components/camel-barcode/src/main/java/org/apache/camel/dataformat/barcode/BarcodeDataFormat.java b/components/camel-barcode/src/main/java/org/apache/camel/dataformat/barcode/BarcodeDataFormat.java
index 07b3ad9..746334c 100644
--- a/components/camel-barcode/src/main/java/org/apache/camel/dataformat/barcode/BarcodeDataFormat.java
+++ b/components/camel-barcode/src/main/java/org/apache/camel/dataformat/barcode/BarcodeDataFormat.java
@@ -43,6 +43,8 @@ import org.apache.camel.spi.DataFormatName;
 import org.apache.camel.spi.annotations.Dataformat;
 import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * {@link DataFormat} to create (encode) and
@@ -54,6 +56,8 @@ import org.apache.camel.support.service.ServiceSupport;
 @Dataformat("barcode")
 public class BarcodeDataFormat extends ServiceSupport implements DataFormat, DataFormatName {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BarcodeDataFormat.class);
+
     /**
      * The bean for the default parameters.
      */
@@ -235,7 +239,7 @@ public class BarcodeDataFormat extends ServiceSupport implements DataFormat, Dat
      */
     public final void addToHintMap(final EncodeHintType hintType, final Object value) {
         this.writerHintMap.put(hintType, value);
-        log.info(String.format("Added '%s' with value '%s' to writer hint map.", hintType.toString(), value.toString()));
+        LOG.info(String.format("Added '%s' with value '%s' to writer hint map.", hintType.toString(), value.toString()));
     }
 
     /**
@@ -251,9 +255,9 @@ public class BarcodeDataFormat extends ServiceSupport implements DataFormat, Dat
     public final void removeFromHintMap(final EncodeHintType hintType) {
         if (this.writerHintMap.containsKey(hintType)) {
             this.writerHintMap.remove(hintType);
-            log.info(String.format("Removed '%s' from writer hint map.", hintType.toString()));
+            LOG.info(String.format("Removed '%s' from writer hint map.", hintType.toString()));
         } else {
-            log.warn(String.format("Could not find encode hint type '%s' in writer hint map.", hintType.toString()));
+            LOG.warn(String.format("Could not find encode hint type '%s' in writer hint map.", hintType.toString()));
         }
     }
 
@@ -263,9 +267,9 @@ public class BarcodeDataFormat extends ServiceSupport implements DataFormat, Dat
     public final void removeFromHintMap(final DecodeHintType hintType) {
         if (this.readerHintMap.containsKey(hintType)) {
             this.readerHintMap.remove(hintType);
-            log.info(String.format("Removed '%s' from reader hint map.", hintType.toString()));
+            LOG.info(String.format("Removed '%s' from reader hint map.", hintType.toString()));
         } else {
-            log.warn(String.format("Could not find decode hint type '%s' in reader hint map.", hintType.toString()));
+            LOG.warn(String.format("Could not find decode hint type '%s' in reader hint map.", hintType.toString()));
         }
     }
 
diff --git a/components/camel-bean/src/main/java/org/apache/camel/component/bean/AbstractBeanProcessor.java b/components/camel-bean/src/main/java/org/apache/camel/component/bean/AbstractBeanProcessor.java
index 1964498..ff273eb 100644
--- a/components/camel-bean/src/main/java/org/apache/camel/component/bean/AbstractBeanProcessor.java
+++ b/components/camel-bean/src/main/java/org/apache/camel/component/bean/AbstractBeanProcessor.java
@@ -25,6 +25,8 @@ import org.apache.camel.NoSuchBeanException;
 import org.apache.camel.Processor;
 import org.apache.camel.support.AsyncProcessorSupport;
 import org.apache.camel.support.service.ServiceHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A {@link Processor} which converts the inbound exchange to a method
@@ -32,6 +34,8 @@ import org.apache.camel.support.service.ServiceHelper;
  */
 public abstract class AbstractBeanProcessor extends AsyncProcessorSupport {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractBeanProcessor.class);
+
     private final BeanHolder beanHolder;
     private transient Processor processor;
     private transient boolean lookupProcessorDone;
@@ -106,8 +110,8 @@ public abstract class AbstractBeanProcessor extends AsyncProcessorSupport {
                 }
             }
             if (target != null) {
-                if (log.isTraceEnabled()) {
-                    log.trace("Using a custom adapter as bean invocation: {}", target);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Using a custom adapter as bean invocation: {}", target);
                 }
                 try {
                     target.process(exchange);
diff --git a/components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanComponent.java b/components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanComponent.java
index 4929e22..e2d819b 100644
--- a/components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanComponent.java
+++ b/components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanComponent.java
@@ -25,6 +25,8 @@ import org.apache.camel.support.DefaultComponent;
 import org.apache.camel.support.LRUCache;
 import org.apache.camel.support.LRUCacheFactory;
 import org.apache.camel.util.PropertiesHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The bean component is for invoking Java beans from Camel.
@@ -32,6 +34,8 @@ import org.apache.camel.util.PropertiesHelper;
 @org.apache.camel.spi.annotations.Component("bean")
 public class BeanComponent extends DefaultComponent {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BeanComponent.class);
+
     // use an internal soft cache for BeanInfo as they are costly to introspect
     // for example the bean language using OGNL expression runs much faster reusing the BeanInfo from this cache
     @SuppressWarnings("unchecked")
@@ -82,9 +86,9 @@ public class BeanComponent extends DefaultComponent {
 
     @Override
     protected void doShutdown() throws Exception {
-        if (log.isDebugEnabled() && beanInfoCache instanceof LRUCache) {
+        if (LOG.isDebugEnabled() && beanInfoCache instanceof LRUCache) {
             LRUCache cache = (LRUCache) this.beanInfoCache;
-            log.debug("Clearing BeanInfo cache[size={}, hits={}, misses={}, evicted={}]", cache.size(), cache.getHits(), cache.getMisses(), cache.getEvicted());
+            LOG.debug("Clearing BeanInfo cache[size={}, hits={}, misses={}, evicted={}]", cache.size(), cache.getHits(), cache.getMisses(), cache.getEvicted());
         }
         beanInfoCache.clear();
     }
diff --git a/components/camel-beanstalk/src/main/java/org/apache/camel/component/beanstalk/BeanstalkConsumer.java b/components/camel-beanstalk/src/main/java/org/apache/camel/component/beanstalk/BeanstalkConsumer.java
index 2a9ee42..47f7f1c 100644
--- a/components/camel-beanstalk/src/main/java/org/apache/camel/component/beanstalk/BeanstalkConsumer.java
+++ b/components/camel-beanstalk/src/main/java/org/apache/camel/component/beanstalk/BeanstalkConsumer.java
@@ -34,6 +34,8 @@ import org.apache.camel.component.beanstalk.processors.DeleteCommand;
 import org.apache.camel.component.beanstalk.processors.ReleaseCommand;
 import org.apache.camel.spi.Synchronization;
 import org.apache.camel.support.ScheduledPollConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * PollingConsumer to read Beanstalk jobs.
@@ -53,6 +55,8 @@ import org.apache.camel.support.ScheduledPollConsumer;
  */
 public class BeanstalkConsumer extends ScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BeanstalkConsumer.class);
+
     private static final String[] STATS_KEY_STR = new String[]{"tube", "state"};
     private static final String[] STATS_KEY_INT = new String[]{"age", "time-left", "timeouts", "releases", "buries", "kicks"};
 
@@ -85,8 +89,8 @@ public class BeanstalkConsumer extends ScheduledPollConsumer {
                     return null;
                 }
 
-                if (log.isDebugEnabled()) {
-                    log.debug(String.format("Received job ID %d (data length %d)", job.getJobId(), job.getData().length));
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug(String.format("Received job ID %d (data length %d)", job.getJobId(), job.getData().length));
                 }
 
                 final Exchange exchange = getEndpoint().createExchange(ExchangePattern.InOnly);
@@ -221,7 +225,7 @@ public class BeanstalkConsumer extends ScheduledPollConsumer {
             try {
                 executor.submit(new RunCommand(successCommand, exchange)).get();
             } catch (Exception e) {
-                log.error(String.format("Could not run completion of exchange %s", exchange), e);
+                LOG.error(String.format("Could not run completion of exchange %s", exchange), e);
             }
         }
 
@@ -230,7 +234,7 @@ public class BeanstalkConsumer extends ScheduledPollConsumer {
             try {
                 executor.submit(new RunCommand(failureCommand, exchange)).get();
             } catch (Exception e) {
-                log.error(String.format("%s could not run failure of exchange %s", failureCommand.getClass().getName(), exchange), e);
+                LOG.error(String.format("%s could not run failure of exchange %s", failureCommand.getClass().getName(), exchange), e);
             }
         }
 
@@ -249,12 +253,12 @@ public class BeanstalkConsumer extends ScheduledPollConsumer {
                     try {
                         command.act(client, exchange);
                     } catch (BeanstalkException e) {
-                        log.warn(String.format("Post-processing %s of exchange %s failed, retrying.", command.getClass().getName(), exchange), e);
+                        LOG.warn(String.format("Post-processing %s of exchange %s failed, retrying.", command.getClass().getName(), exchange), e);
                         resetClient();
                         command.act(client, exchange);
                     }
                 } catch (final Exception e) {
-                    log.error(String.format("%s could not post-process exchange %s", command.getClass().getName(), exchange), e);
+                    LOG.error(String.format("%s could not post-process exchange %s", command.getClass().getName(), exchange), e);
                     exchange.setException(e);
                 }
             }
diff --git a/components/camel-blueprint/src/main/java/org/apache/camel/blueprint/BlueprintCamelContext.java b/components/camel-blueprint/src/main/java/org/apache/camel/blueprint/BlueprintCamelContext.java
index 7993a56..7453344 100644
--- a/components/camel-blueprint/src/main/java/org/apache/camel/blueprint/BlueprintCamelContext.java
+++ b/components/camel-blueprint/src/main/java/org/apache/camel/blueprint/BlueprintCamelContext.java
@@ -40,12 +40,16 @@ import org.osgi.framework.ServiceRegistration;
 import org.osgi.service.blueprint.container.BlueprintContainer;
 import org.osgi.service.blueprint.container.BlueprintEvent;
 import org.osgi.service.blueprint.container.BlueprintListener;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OSGi Blueprint based {@link org.apache.camel.CamelContext}.
  */
 public class BlueprintCamelContext extends DefaultCamelContext implements ServiceListener, BlueprintListener {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BlueprintCamelContext.class);
+
     protected final AtomicBoolean routeDefinitionValid = new AtomicBoolean(true);
 
     private BundleContext bundleContext;
@@ -107,7 +111,7 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
    
     @Override
     public void doInit() throws Exception {
-        log.trace("init {}", this);
+        LOG.trace("init {}", this);
         // add service listener so we can be notified when blueprint container is done
         // and we would be ready to start CamelContext
         bundleContext.addServiceListener(this);
@@ -119,19 +123,19 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
     }
 
     public void destroy() throws Exception {
-        log.trace("destroy {}", this);
+        LOG.trace("destroy {}", this);
 
         // remove listener and stop this CamelContext
         try {
             bundleContext.removeServiceListener(this);
         } catch (Exception e) {
-            log.warn("Error removing ServiceListener: " + this + ". This exception is ignored.", e);
+            LOG.warn("Error removing ServiceListener: " + this + ". This exception is ignored.", e);
         }
         if (registration != null) {
             try {
                 registration.unregister();
             } catch (Exception e) {
-                log.warn("Error unregistering service registration: " + registration + ". This exception is ignored.", e);
+                LOG.warn("Error unregistering service registration: " + registration + ". This exception is ignored.", e);
             }
             registration = null;
         }
@@ -143,7 +147,7 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
 
     @Override
     public void blueprintEvent(BlueprintEvent event) {
-        if (log.isDebugEnabled()) {
+        if (LOG.isDebugEnabled()) {
             String eventTypeString;
 
             switch (event.getType()) {
@@ -173,16 +177,16 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
                 break;
             }
 
-            log.debug("Received BlueprintEvent[replay={} type={} bundle={}] {}", event.isReplay(), eventTypeString, event.getBundle().getSymbolicName(), event);
+            LOG.debug("Received BlueprintEvent[replay={} type={} bundle={}] {}", event.isReplay(), eventTypeString, event.getBundle().getSymbolicName(), event);
         }
 
         if (!event.isReplay() && this.getBundleContext().getBundle().getBundleId() == event.getBundle().getBundleId()) {
             if (event.getType() == BlueprintEvent.CREATED) {
                 try {
-                    log.info("Attempting to start CamelContext: {}", this.getName());
+                    LOG.info("Attempting to start CamelContext: {}", this.getName());
                     this.maybeStart();
                 } catch (Exception startEx) {
-                    log.error("Error occurred during starting CamelContext: {}", this.getName(), startEx);
+                    LOG.error("Error occurred during starting CamelContext: {}", this.getName(), startEx);
                 }
             }
         }
@@ -190,7 +194,7 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
 
     @Override
     public void serviceChanged(ServiceEvent event) {
-        if (log.isTraceEnabled()) {
+        if (LOG.isTraceEnabled()) {
             String eventTypeString;
 
             switch (event.getType()) {
@@ -212,7 +216,7 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
             }
 
             // use trace logging as this is very noisy
-            log.trace("Service: {} changed to: {}", event, eventTypeString);
+            LOG.trace("Service: {} changed to: {}", event, eventTypeString);
         }
     }
 
@@ -246,10 +250,10 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
     }
 
     private void maybeStart() throws Exception {
-        log.trace("maybeStart: {}", this);
+        LOG.trace("maybeStart: {}", this);
 
         if (!routeDefinitionValid.get()) {
-            log.trace("maybeStart: {} is skipping since CamelRoute definition is not correct.", this);
+            LOG.trace("maybeStart: {} is skipping since CamelRoute definition is not correct.", this);
             return;
         }
 
@@ -270,16 +274,16 @@ public class BlueprintCamelContext extends DefaultCamelContext implements Servic
         // when blueprint loading the bundle
         boolean skip = "true".equalsIgnoreCase(System.getProperty("skipStartingCamelContext"));
         if (skip) {
-            log.trace("maybeStart: {} is skipping as System property skipStartingCamelContext is set", this);
+            LOG.trace("maybeStart: {} is skipping as System property skipStartingCamelContext is set", this);
             return;
         }
 
         if (!isStarted() && !isStarting()) {
-            log.debug("Starting {}", this);
+            LOG.debug("Starting {}", this);
             start();
         } else {
             // ignore as Camel is already started
-            log.trace("Ignoring maybeStart() as {} is already started", this);
+            LOG.trace("Ignoring maybeStart() as {} is already started", this);
         }
     }
 
diff --git a/components/camel-box/camel-box-component/src/main/java/org/apache/camel/component/box/BoxConsumer.java b/components/camel-box/camel-box-component/src/main/java/org/apache/camel/component/box/BoxConsumer.java
index 2732e27..0080209 100644
--- a/components/camel-box/camel-box-component/src/main/java/org/apache/camel/component/box/BoxConsumer.java
+++ b/components/camel-box/camel-box-component/src/main/java/org/apache/camel/component/box/BoxConsumer.java
@@ -31,12 +31,16 @@ import org.apache.camel.support.component.AbstractApiConsumer;
 import org.apache.camel.support.component.ApiConsumerHelper;
 import org.apache.camel.support.component.ApiMethod;
 import org.apache.camel.support.component.ApiMethodHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Box consumer.
  */
 public class BoxConsumer extends AbstractApiConsumer<BoxApiName, BoxConfiguration> implements EventListener {
 
+    private static final Logger LOG = LoggerFactory.getLogger(BoxConsumer.class);
+
     private static final String LISTENER_PROPERTY = "listener";
 
     private BoxAPIConnection boxConnection;
@@ -72,10 +76,10 @@ public class BoxConsumer extends AbstractApiConsumer<BoxApiName, BoxConfiguratio
     public void onEvent(BoxEvent event) {
         try {
             // Convert Events to exchange and process
-            log.debug("Processed {} event for {}", ApiConsumerHelper.getResultsProcessed(this, event, false),
+            LOG.debug("Processed {} event for {}", ApiConsumerHelper.getResultsProcessed(this, event, false),
                     boxConnection);
         } catch (Exception e) {
-            log.info("Received exception consuming event: ", e);
+            LOG.info("Received exception consuming event: ", e);
         }
     }
 
diff --git a/components/camel-caffeine/src/main/java/org/apache/camel/component/caffeine/processor/aggregate/CaffeineAggregationRepository.java b/components/camel-caffeine/src/main/java/org/apache/camel/component/caffeine/processor/aggregate/CaffeineAggregationRepository.java
index 81e4ffa..23cca1b 100644
--- a/components/camel-caffeine/src/main/java/org/apache/camel/component/caffeine/processor/aggregate/CaffeineAggregationRepository.java
+++ b/components/camel-caffeine/src/main/java/org/apache/camel/component/caffeine/processor/aggregate/CaffeineAggregationRepository.java
@@ -28,9 +28,13 @@ import org.apache.camel.spi.RecoverableAggregationRepository;
 import org.apache.camel.support.DefaultExchange;
 import org.apache.camel.support.DefaultExchangeHolder;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CaffeineAggregationRepository extends ServiceSupport implements RecoverableAggregationRepository {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CaffeineAggregationRepository.class);
+
     private CamelContext camelContext;
     private Cache<String, DefaultExchangeHolder> cache;
     private boolean allowSerializedHeaders;
@@ -123,7 +127,7 @@ public class CaffeineAggregationRepository extends ServiceSupport implements Rec
 
     @Override
     public Exchange add(final CamelContext camelContext, final String key, final Exchange exchange) {
-        log.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
+        LOG.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
 
         final DefaultExchangeHolder oldHolder = cache.getIfPresent(key);
         final DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(exchange, true, allowSerializedHeaders);
@@ -140,13 +144,13 @@ public class CaffeineAggregationRepository extends ServiceSupport implements Rec
 
     @Override
     public void remove(CamelContext camelContext, String key, Exchange exchange) {
-        log.trace("Removing an exchange with ID {} for key {}", exchange.getExchangeId(), key);
+        LOG.trace("Removing an exchange with ID {} for key {}", exchange.getExchangeId(), key);
         cache.invalidate(key);
     }
 
     @Override
     public void confirm(CamelContext camelContext, String exchangeId) {
-        log.trace("Confirming an exchange with ID {}.", exchangeId);
+        LOG.trace("Confirming an exchange with ID {}.", exchangeId);
         cache.invalidate(exchangeId);
     }
 
@@ -159,15 +163,15 @@ public class CaffeineAggregationRepository extends ServiceSupport implements Rec
 
     @Override
     public Set<String> scan(CamelContext camelContext) {
-        log.trace("Scanning for exchanges to recover in {} context", camelContext.getName());
+        LOG.trace("Scanning for exchanges to recover in {} context", camelContext.getName());
         Set<String> scanned = Collections.unmodifiableSet(getKeys());
-        log.trace("Found {} keys for exchanges to recover in {} context", scanned.size(), camelContext.getName());
+        LOG.trace("Found {} keys for exchanges to recover in {} context", scanned.size(), camelContext.getName());
         return scanned;
     }
 
     @Override
     public Exchange recover(CamelContext camelContext, String exchangeId) {
-        log.trace("Recovering an Exchange with ID {}.", exchangeId);
+        LOG.trace("Recovering an Exchange with ID {}.", exchangeId);
         return useRecovery ? unmarshallExchange(camelContext, cache.getIfPresent(exchangeId)) : null;
     }
 
diff --git a/components/camel-cbor/src/main/java/org/apache/camel/component/cbor/CBORDataFormat.java b/components/camel-cbor/src/main/java/org/apache/camel/component/cbor/CBORDataFormat.java
index 7b257ed..610d595 100644
--- a/components/camel-cbor/src/main/java/org/apache/camel/component/cbor/CBORDataFormat.java
+++ b/components/camel-cbor/src/main/java/org/apache/camel/component/cbor/CBORDataFormat.java
@@ -39,10 +39,14 @@ import org.apache.camel.spi.DataFormatName;
 import org.apache.camel.spi.annotations.Dataformat;
 import org.apache.camel.support.ObjectHelper;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Dataformat("cbor")
 public class CBORDataFormat extends ServiceSupport implements DataFormat, DataFormatName {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CBORDataFormat.class);
+
     private CamelContext camelContext;
     private ObjectMapper objectMapper;
     private Class<?> unmarshalType;
@@ -297,15 +301,15 @@ public class CBORDataFormat extends ServiceSupport implements DataFormat, DataFo
                 Set<ObjectMapper> set = camelContext.getRegistry().findByType(ObjectMapper.class);
                 if (set.size() == 1) {
                     objectMapper = set.iterator().next();
-                    log.info("Found single ObjectMapper in Registry to use: {}", objectMapper);
+                    LOG.info("Found single ObjectMapper in Registry to use: {}", objectMapper);
                 } else if (set.size() > 1) {
-                    log.debug("Found {} ObjectMapper in Registry cannot use as default as there are more than one instance.", set.size());
+                    LOG.debug("Found {} ObjectMapper in Registry cannot use as default as there are more than one instance.", set.size());
                 }
             }
             if (objectMapper == null) {
                 CBORFactory factory = new CBORFactory();
                 objectMapper = new ObjectMapper(factory);
-                log.debug("Creating new ObjectMapper to use: {}", objectMapper);
+                LOG.debug("Creating new ObjectMapper to use: {}", objectMapper);
             }
         }
 
@@ -369,7 +373,6 @@ public class CBORDataFormat extends ServiceSupport implements DataFormat, DataFo
 
     @Override
     protected void doStop() throws Exception {
-        // TODO Auto-generated method stub
-
+        // noop
     }
 }
diff --git a/components/camel-cdi/src/main/java/org/apache/camel/cdi/transaction/TransactionErrorHandler.java b/components/camel-cdi/src/main/java/org/apache/camel/cdi/transaction/TransactionErrorHandler.java
index 47751ce..5d5326b 100644
--- a/components/camel-cdi/src/main/java/org/apache/camel/cdi/transaction/TransactionErrorHandler.java
+++ b/components/camel-cdi/src/main/java/org/apache/camel/cdi/transaction/TransactionErrorHandler.java
@@ -38,6 +38,8 @@ import org.apache.camel.support.AsyncCallbackToCompletableFutureAdapter;
 import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.support.service.ServiceHelper;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Does transactional execution according given policy. This class is based on
@@ -49,6 +51,8 @@ import org.apache.camel.util.ObjectHelper;
 public class TransactionErrorHandler extends ErrorHandlerSupport
         implements AsyncProcessor, ShutdownPrepared, Navigate<Processor> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(TransactionErrorHandler.class);
+
     protected final Processor output;
 
     protected volatile boolean preparingShutdown;
@@ -156,16 +160,16 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
         Boolean onlyLast = (Boolean) exchange.removeProperty(Exchange.ROLLBACK_ONLY_LAST);
         if (onlyLast != null && onlyLast) {
             // we only want this logged at debug level
-            if (log.isDebugEnabled()) {
+            if (LOG.isDebugEnabled()) {
                 // log exception if there was a cause exception so we have the
                 // stack trace
                 Exception cause = exchange.getException();
                 if (cause != null) {
-                    log.debug("Transaction rollback ({}) redelivered({}) for {} "
+                    LOG.debug("Transaction rollback ({}) redelivered({}) for {} "
                         + "due exchange was marked for rollbackOnlyLast and caught: ",
                         transactionKey, redelivered, ids, cause);
                 } else {
-                    log.debug("Transaction rollback ({}) redelivered({}) for {} "
+                    LOG.debug("Transaction rollback ({}) redelivered({}) for {} "
                         + "due exchange was marked for rollbackOnlyLast",
                         transactionKey, redelivered, ids);
                 }
@@ -212,8 +216,8 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
 
                     // throw runtime exception to force rollback (which works
                     // best to rollback with Spring transaction manager)
-                    if (log.isTraceEnabled()) {
-                        log.trace("Throwing runtime exception to force transaction to rollback on {}",
+                    if (LOG.isTraceEnabled()) {
+                        LOG.trace("Throwing runtime exception to force transaction to rollback on {}",
                                 transactionPolicy);
                     }
                     throw rce;
@@ -244,8 +248,8 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
      * Logs the transaction begin
      */
     private void logTransactionBegin(String redelivered, String ids) {
-        if (log.isDebugEnabled()) {
-            log.debug("Transaction begin ({}) redelivered({}) for {})",
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Transaction begin ({}) redelivered({}) for {})",
                     transactionKey, redelivered, ids);
         }
     }
@@ -261,7 +265,7 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
             // committed this time
             if (rollbackLoggingLevel == LoggingLevel.INFO || rollbackLoggingLevel == LoggingLevel.WARN
                     || rollbackLoggingLevel == LoggingLevel.ERROR) {
-                log.info("Transaction commit ({}) redelivered({}) for {})",
+                LOG.info("Transaction commit ({}) redelivered({}) for {})",
                         transactionKey, redelivered, ids);
                 // return after we have logged
                 return;
@@ -269,7 +273,7 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
         }
 
         // log non redelivered by default at DEBUG level
-        log.debug("Transaction commit ({}) redelivered({}) for {})", transactionKey, redelivered, ids);
+        LOG.debug("Transaction commit ({}) redelivered({}) for {})", transactionKey, redelivered, ids);
     }
 
     /**
@@ -278,44 +282,44 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
     private void logTransactionRollback(String redelivered, String ids, Throwable e, boolean rollbackOnly) {
         if (rollbackLoggingLevel == LoggingLevel.OFF) {
             return;
-        } else if (rollbackLoggingLevel == LoggingLevel.ERROR && log.isErrorEnabled()) {
+        } else if (rollbackLoggingLevel == LoggingLevel.ERROR && LOG.isErrorEnabled()) {
             if (rollbackOnly) {
-                log.error("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
+                LOG.error("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                         transactionKey, redelivered, ids);
             } else {
-                log.error("Transaction rollback ({}) redelivered({}) for {} caught: {}",
+                LOG.error("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                         transactionKey, redelivered, ids, e.getMessage());
             }
-        } else if (rollbackLoggingLevel == LoggingLevel.WARN && log.isWarnEnabled()) {
+        } else if (rollbackLoggingLevel == LoggingLevel.WARN && LOG.isWarnEnabled()) {
             if (rollbackOnly) {
-                log.warn("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
+                LOG.warn("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                         transactionKey, redelivered, ids);
             } else {
-                log.warn("Transaction rollback ({}) redelivered({}) for {} caught: {}",
+                LOG.warn("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                         transactionKey, redelivered, ids, e.getMessage());
             }
-        } else if (rollbackLoggingLevel == LoggingLevel.INFO && log.isInfoEnabled()) {
+        } else if (rollbackLoggingLevel == LoggingLevel.INFO && LOG.isInfoEnabled()) {
             if (rollbackOnly) {
-                log.info("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
+                LOG.info("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                         transactionKey, redelivered, ids);
             } else {
-                log.info("Transaction rollback ({}) redelivered({}) for {} caught: {}",
+                LOG.info("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                         transactionKey, redelivered, ids, e.getMessage());
             }
-        } else if (rollbackLoggingLevel == LoggingLevel.DEBUG && log.isDebugEnabled()) {
+        } else if (rollbackLoggingLevel == LoggingLevel.DEBUG && LOG.isDebugEnabled()) {
             if (rollbackOnly) {
-                log.debug("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
+                LOG.debug("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                         transactionKey, redelivered, ids);
             } else {
-                log.debug("Transaction rollback ({}) redelivered({}) for {} caught: {}",
+                LOG.debug("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                         transactionKey, redelivered, ids, e.getMessage());
             }
-        } else if (rollbackLoggingLevel == LoggingLevel.TRACE && log.isTraceEnabled()) {
+        } else if (rollbackLoggingLevel == LoggingLevel.TRACE && LOG.isTraceEnabled()) {
             if (rollbackOnly) {
-                log.trace("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
+                LOG.trace("Transaction rollback ({}) redelivered({}) for {} due exchange was marked for rollbackOnly",
                         transactionKey, redelivered, ids);
             } else {
-                log.trace("Transaction rollback ({}) redelivered({}) for {} caught: {}",
+                LOG.trace("Transaction rollback ({}) redelivered({}) for {} caught: {}",
                         transactionKey, redelivered, ids, e.getMessage());
             }
         }
@@ -376,7 +380,7 @@ public class TransactionErrorHandler extends ErrorHandlerSupport
     @Override
     public void prepareShutdown(boolean suspendOnly, boolean forced) {
         // prepare for shutdown, eg do not allow redelivery if configured
-        log.trace("Prepare shutdown on error handler {}", this);
+        LOG.trace("Prepare shutdown on error handler {}", this);
         preparingShutdown = true;
     }
 }
diff --git a/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMComponent.java b/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMComponent.java
index 083cd5b..b1d2cab 100644
--- a/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMComponent.java
+++ b/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMComponent.java
@@ -52,7 +52,6 @@ public class CMComponent extends DefaultComponent {
         setProperties(endpoint, parameters);
 
         // Validate configuration
-        log.debug("Validating uri based configuration");
         final Set<ConstraintViolation<CMConfiguration>> constraintViolations = getValidator().validate(endpoint.getConfiguration());
         if (constraintViolations.size() > 0) {
             final StringBuffer msg = new StringBuffer();
diff --git a/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMProducer.java b/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMProducer.java
index e5bef34..a2cfc8f 100644
--- a/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMProducer.java
+++ b/components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/CMProducer.java
@@ -28,12 +28,15 @@ import org.apache.camel.component.cm.exceptions.HostUnavailableException;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.http.client.methods.HttpHead;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * is the exchange processor. Sends a validated sms message to CM Endpoints.
  */
 public class CMProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CMProducer.class);
     private Validator validator;
 
     /**
@@ -58,29 +61,29 @@ public class CMProducer extends DefaultProducer {
         final SMSMessage smsMessage = exchange.getIn().getMandatoryBody(SMSMessage.class);
 
         // Validates Payload - SMSMessage
-        log.trace("Validating SMSMessage instance provided: {}", smsMessage);
+        LOG.trace("Validating SMSMessage instance provided: {}", smsMessage);
         final Set<ConstraintViolation<SMSMessage>> constraintViolations = getValidator().validate(smsMessage);
         if (constraintViolations.size() > 0) {
             final StringBuffer msg = new StringBuffer();
             for (final ConstraintViolation<SMSMessage> cv : constraintViolations) {
                 msg.append(String.format("- Invalid value for %s: %s", cv.getPropertyPath().toString(), cv.getMessage()));
             }
-            log.debug(msg.toString());
+            LOG.debug(msg.toString());
             throw new InvalidPayloadRuntimeException(exchange, SMSMessage.class);
         }
-        log.trace("SMSMessage instance is valid: {}", smsMessage);
+        LOG.trace("SMSMessage instance is valid: {}", smsMessage);
 
         // We have a valid (immutable) SMSMessage instance, lets extend to
         // CMMessage
         // This is the instance we will use to build the XML document to be
         // sent to CM SMS GW.
         final CMMessage cmMessage = new CMMessage(smsMessage.getPhoneNumber(), smsMessage.getMessage());
-        log.debug("CMMessage instance build from valid SMSMessage instance");
+        LOG.debug("CMMessage instance build from valid SMSMessage instance");
 
         if (smsMessage.getFrom() == null || smsMessage.getFrom().isEmpty()) {
             String df = getConfiguration().getDefaultFrom();
             cmMessage.setSender(df);
-            log.debug("Dynamic sender is set to default dynamic sender: {}", df);
+            LOG.debug("Dynamic sender is set to default dynamic sender: {}", df);
         }
 
         // Remember, this can be null.
@@ -93,7 +96,7 @@ public class CMProducer extends DefaultProducer {
         //  for abnormal situations.
         sender.send(cmMessage);
 
-        log.debug("Request accepted by CM Host: {}", cmMessage);
+        LOG.debug("Request accepted by CM Host: {}", cmMessage);
     }
 
     @Override
@@ -102,15 +105,15 @@ public class CMProducer extends DefaultProducer {
         // log at debug level for singletons, for prototype scoped log at trace
         // level to not spam logs
 
-        log.debug("Starting CMProducer");
+        LOG.debug("Starting CMProducer");
 
         final CMConfiguration configuration = getConfiguration();
 
         if (configuration.isTestConnectionOnStartup()) {
             try {
-                log.debug("Checking connection - {}", getEndpoint().getCMUrl());
+                LOG.debug("Checking connection - {}", getEndpoint().getCMUrl());
                 HttpClientBuilder.create().build().execute(new HttpHead(getEndpoint().getCMUrl()));
-                log.debug("Connection to {}: OK", getEndpoint().getCMUrl());
+                LOG.debug("Connection to {}: OK", getEndpoint().getCMUrl());
             } catch (final Exception e) {
                 throw new HostUnavailableException(String.format("Connection to %s: NOT AVAILABLE", getEndpoint().getCMUrl()), e);
             }
@@ -119,7 +122,7 @@ public class CMProducer extends DefaultProducer {
         // keep starting
         super.doStart();
 
-        log.debug("CMProducer started");
+        LOG.debug("CMProducer started");
     }
 
     @Override
diff --git a/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISConsumer.java b/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISConsumer.java
index fd4240b..1dfe2d1 100644
--- a/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISConsumer.java
+++ b/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISConsumer.java
@@ -23,12 +23,16 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.support.ScheduledPollConsumer;
 import org.apache.chemistry.opencmis.client.api.OperationContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The CMIS consumer.
  */
 public class CMISConsumer extends ScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CMISConsumer.class);
+
     private final CMISSessionFacadeFactory sessionFacadeFactory;
     private CMISSessionFacade sessionFacade;
 
@@ -57,7 +61,7 @@ public class CMISConsumer extends ScheduledPollConsumer {
         Exchange exchange = getEndpoint().createExchange();
         exchange.getIn().setHeaders(properties);
         exchange.getIn().setBody(inputStream);
-        log.debug("Polling node : {}", properties.get("cmis:name"));
+        LOG.debug("Polling node: {}", properties.get("cmis:name"));
         getProcessor().process(exchange);
         return 1;
     }
diff --git a/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISProducer.java b/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISProducer.java
index 609b130..8440e53 100644
--- a/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISProducer.java
+++ b/components/camel-cmis/src/main/java/org/apache/camel/component/cmis/CMISProducer.java
@@ -47,12 +47,16 @@ import org.apache.chemistry.opencmis.commons.data.ContentStream;
 import org.apache.chemistry.opencmis.commons.enums.Action;
 import org.apache.chemistry.opencmis.commons.enums.UnfileObject;
 import org.apache.chemistry.opencmis.commons.enums.VersioningState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The CMIS producer.
  */
 public class CMISProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CMISProducer.class);
+
     private final CMISSessionFacadeFactory sessionFacadeFactory;
     private CMISSessionFacade sessionFacade;
 
@@ -294,7 +298,7 @@ public class CMISProducer extends DefaultProducer {
             }
 
             try {
-                log.info("Moving document from " + sourceFolder.getName() + " to " + targetFolder.getName());
+                LOG.info("Moving document from " + sourceFolder.getName() + " to " + targetFolder.getName());
                 return  document.move(sourceFolder, targetFolder);
             } catch (Exception e) {
                 throw new CamelCmisException("Cannot move document to folder " + targetFolder.getName() + " : " + e.getMessage(), e);
@@ -511,7 +515,7 @@ public class CMISProducer extends DefaultProducer {
         if (!cmisProperties.containsKey(PropertyIds.OBJECT_TYPE_ID)) {
             cmisProperties.put(PropertyIds.OBJECT_TYPE_ID, CamelCMISConstants.CMIS_FOLDER);
         }
-        log.debug("Creating folder with properties: {}", cmisProperties);
+        LOG.debug("Creating folder with properties: {}", cmisProperties);
         return parentFolder.createFolder(cmisProperties);
     }
 
@@ -524,7 +528,7 @@ public class CMISProducer extends DefaultProducer {
         if (getSessionFacade().isObjectTypeVersionable((String) cmisProperties.get(PropertyIds.OBJECT_TYPE_ID))) {
             versioningState = VersioningState.MAJOR;
         }
-        log.debug("Creating document with properties: {}", cmisProperties);
+        LOG.debug("Creating document with properties: {}", cmisProperties);
 
         return parentFolder.createDocument(cmisProperties, contentStream, versioningState);
     }
diff --git a/components/camel-controlbus/src/main/java/org/apache/camel/component/controlbus/ControlBusProducer.java b/components/camel-controlbus/src/main/java/org/apache/camel/component/controlbus/ControlBusProducer.java
index 82f0dd7..072a0e0 100644
--- a/components/camel-controlbus/src/main/java/org/apache/camel/component/controlbus/ControlBusProducer.java
+++ b/components/camel-controlbus/src/main/java/org/apache/camel/component/controlbus/ControlBusProducer.java
@@ -33,12 +33,16 @@ import org.apache.camel.spi.UnitOfWork;
 import org.apache.camel.support.DefaultAsyncProducer;
 import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The control bus producer.
  */
 public class ControlBusProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ControlBusProducer.class);
+
     private final CamelLogger logger;
 
     public ControlBusProducer(Endpoint endpoint, CamelLogger logger) {
@@ -170,24 +174,24 @@ public class ControlBusProducer extends DefaultAsyncProducer {
 
             try {
                 if ("start".equals(action)) {
-                    log.debug("Starting route: {}", id);
+                    LOG.debug("Starting route: {}", id);
                     getEndpoint().getCamelContext().getRouteController().startRoute(id);
                 } else if ("stop".equals(action)) {
-                    log.debug("Stopping route: {}", id);
+                    LOG.debug("Stopping route: {}", id);
                     getEndpoint().getCamelContext().getRouteController().stopRoute(id);
                 } else if ("suspend".equals(action)) {
-                    log.debug("Suspending route: {}", id);
+                    LOG.debug("Suspending route: {}", id);
                     getEndpoint().getCamelContext().getRouteController().suspendRoute(id);
                 } else if ("resume".equals(action)) {
-                    log.debug("Resuming route: {}", id);
+                    LOG.debug("Resuming route: {}", id);
                     getEndpoint().getCamelContext().getRouteController().resumeRoute(id);
                 } else if ("restart".equals(action)) {
-                    log.debug("Restarting route: {}", id);
+                    LOG.debug("Restarting route: {}", id);
                     getEndpoint().getCamelContext().getRouteController().stopRoute(id);
                     int delay = getEndpoint().getRestartDelay();
                     if (delay > 0) {
                         try {
-                            log.debug("Sleeping {} ms before starting route: {}", delay, id);
+                            LOG.debug("Sleeping {} ms before starting route: {}", delay, id);
                             Thread.sleep(delay);
                         } catch (InterruptedException e) {
                             // ignore
@@ -195,13 +199,13 @@ public class ControlBusProducer extends DefaultAsyncProducer {
                     }
                     getEndpoint().getCamelContext().getRouteController().startRoute(id);
                 } else if ("status".equals(action)) {
-                    log.debug("Route status: {}", id);
+                    LOG.debug("Route status: {}", id);
                     ServiceStatus status = getEndpoint().getCamelContext().getRouteController().getRouteStatus(id);
                     if (status != null) {
                         result = status.name();
                     }
                 } else if ("stats".equals(action)) {
-                    log.debug("Route stats: {}", id);
+                    LOG.debug("Route stats: {}", id);
 
                     // camel context or per route
                     String name = getEndpoint().getCamelContext().getManagementName();
diff --git a/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseConsumer.java b/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseConsumer.java
index 6db26ea..5202d04 100644
--- a/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseConsumer.java
+++ b/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseConsumer.java
@@ -24,6 +24,9 @@ import com.couchbase.client.protocol.views.ViewRow;
 import org.apache.camel.Exchange;
 import org.apache.camel.Processor;
 import org.apache.camel.support.DefaultScheduledPollConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.couchbase.CouchbaseConstants.HEADER_DESIGN_DOCUMENT_NAME;
 import static org.apache.camel.component.couchbase.CouchbaseConstants.HEADER_ID;
@@ -32,6 +35,8 @@ import static org.apache.camel.component.couchbase.CouchbaseConstants.HEADER_VIE
 
 public class CouchbaseConsumer extends DefaultScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CouchbaseConsumer.class);
+
     private final CouchbaseEndpoint endpoint;
     private final CouchbaseClient client;
     private final View view;
@@ -75,13 +80,13 @@ public class CouchbaseConsumer extends DefaultScheduledPollConsumer {
 
     @Override
     protected void doStart() throws Exception {
-        log.info("Starting Couchbase consumer");
+        LOG.info("Starting Couchbase consumer");
         super.doStart();
     }
 
     @Override
     protected void doStop() throws Exception {
-        log.info("Stopping Couchbase consumer");
+        LOG.info("Stopping Couchbase consumer");
         super.doStop();
         if (client != null) {
             client.shutdown();
@@ -91,10 +96,10 @@ public class CouchbaseConsumer extends DefaultScheduledPollConsumer {
     @Override
     protected synchronized int poll() throws Exception {
         ViewResponse result = client.query(view, query);
-        log.info("Received result set from Couchbase");
+        LOG.info("Received result set from Couchbase");
 
-        if (log.isTraceEnabled()) {
-            log.trace("ViewResponse = {}", result);
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("ViewResponse = {}", result);
         }
 
         String consumerProcessedStrategy = endpoint.getConsumerProcessedStrategy();
@@ -116,17 +121,17 @@ public class CouchbaseConsumer extends DefaultScheduledPollConsumer {
             exchange.getIn().setHeader(HEADER_VIEWNAME, viewName);
 
             if ("delete".equalsIgnoreCase(consumerProcessedStrategy)) {
-                if (log.isTraceEnabled()) {
-                    log.trace("Deleting doc with ID {}", id);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Deleting doc with ID {}", id);
                 }
                 client.delete(id);
             } else if ("filter".equalsIgnoreCase(consumerProcessedStrategy)) {
-                if (log.isTraceEnabled()) {
-                    log.trace("Filtering out ID {}", id);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Filtering out ID {}", id);
                 }
                 // add filter for already processed docs
             } else {
-                log.trace("No strategy set for already processed docs, beware of duplicates!");
+                LOG.trace("No strategy set for already processed docs, beware of duplicates!");
             }
 
             logDetails(id, doc, key, designDocumentName, viewName, exchange);
@@ -143,14 +148,14 @@ public class CouchbaseConsumer extends DefaultScheduledPollConsumer {
 
     private void logDetails(String id, Object doc, String key, String designDocumentName, String viewName, Exchange exchange) {
 
-        if (log.isTraceEnabled()) {
-            log.trace("Created exchange = {}", exchange);
-            log.trace("Added Document in body = {}", doc);
-            log.trace("Adding to Header");
-            log.trace("ID = {}", id);
-            log.trace("Key = {}", key);
-            log.trace("Design Document Name = {}", designDocumentName);
-            log.trace("View Name = {}", viewName);
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Created exchange = {}", exchange);
+            LOG.trace("Added Document in body = {}", doc);
+            LOG.trace("Adding to Header");
+            LOG.trace("ID = {}", id);
+            LOG.trace("Key = {}", key);
+            LOG.trace("Design Document Name = {}", designDocumentName);
+            LOG.trace("View Name = {}", viewName);
         }
 
     }
diff --git a/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseProducer.java b/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseProducer.java
index cffb3aa..7fe4c58 100644
--- a/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseProducer.java
+++ b/components/camel-couchbase/src/main/java/org/apache/camel/component/couchbase/CouchbaseProducer.java
@@ -25,6 +25,9 @@ import net.spy.memcached.ReplicateTo;
 import net.spy.memcached.internal.OperationFuture;
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.couchbase.CouchbaseConstants.COUCHBASE_DELETE;
 import static org.apache.camel.component.couchbase.CouchbaseConstants.COUCHBASE_GET;
@@ -37,9 +40,10 @@ import static org.apache.camel.component.couchbase.CouchbaseConstants.HEADER_TTL
  * Couchbase producer generates various type of operations. PUT, GET, and DELETE
  * are currently supported
  */
-
 public class CouchbaseProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CouchbaseProducer.class);
+
     private CouchbaseEndpoint endpoint;
     private CouchbaseClientIF client;
     private long startId;
@@ -114,15 +118,15 @@ public class CouchbaseProducer extends DefaultProducer {
         }
 
         if (endpoint.getOperation().equals(COUCHBASE_PUT)) {
-            log.info("Type of operation: PUT");
+            LOG.debug("Type of operation: PUT");
             Object obj = exchange.getIn().getBody();
             exchange.getOut().setBody(setDocument(id, ttl, obj, persistTo, replicateTo));
         } else if (endpoint.getOperation().equals(COUCHBASE_GET)) {
-            log.info("Type of operation: GET");
+            LOG.debug("Type of operation: GET");
             Object result = client.get(id);
             exchange.getOut().setBody(result);
         } else if (endpoint.getOperation().equals(COUCHBASE_DELETE)) {
-            log.info("Type of operation: DELETE");
+            LOG.debug("Type of operation: DELETE");
             Future<Boolean> result = client.delete(id);
             exchange.getOut().setBody(result.get());
         }
@@ -156,7 +160,7 @@ public class CouchbaseProducer extends DefaultProducer {
             if (retryAttempts <= 0) {
                 throw e;
             } else {
-                log.info("Unable to save Document, retrying in " + producerRetryPause + "ms (" + retryAttempts + ")");
+                LOG.info("Unable to save Document, retrying in " + producerRetryPause + "ms (" + retryAttempts + ")");
                 Thread.sleep(producerRetryPause);
                 return setDocument(id, expiry, obj, retryAttempts - 1, persistTo, replicateTo);
             }
diff --git a/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbConsumer.java b/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbConsumer.java
index ca0c0e0..2e6d606 100644
--- a/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbConsumer.java
+++ b/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbConsumer.java
@@ -20,9 +20,13 @@ import java.util.concurrent.ExecutorService;
 
 import org.apache.camel.Processor;
 import org.apache.camel.support.DefaultConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CouchDbConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CouchDbConsumer.class);
+
     private final CouchDbClientWrapper couchClient;
     private final CouchDbEndpoint endpoint;
     private ExecutorService executor;
@@ -37,7 +41,7 @@ public class CouchDbConsumer extends DefaultConsumer {
     @Override
     protected void doStart() throws Exception {
         super.doStart();
-        log.info("Starting CouchDB consumer");
+        LOG.info("Starting CouchDB consumer");
 
         executor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, endpoint.getEndpointUri(), 1);
         task = new CouchDbChangesetTracker(endpoint, this, couchClient);
@@ -47,7 +51,7 @@ public class CouchDbConsumer extends DefaultConsumer {
     @Override
     protected void doStop() throws Exception {
         super.doStop();
-        log.info("Stopping CouchDB consumer");
+        LOG.info("Stopping CouchDB consumer");
         if (task != null) {
             task.stop();
         }
diff --git a/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbProducer.java b/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbProducer.java
index 4346112..80a4c79 100644
--- a/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbProducer.java
+++ b/components/camel-couchdb/src/main/java/org/apache/camel/component/couchdb/CouchDbProducer.java
@@ -25,9 +25,13 @@ import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.ObjectHelper;
 import org.lightcouch.Response;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CouchDbProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CouchDbProducer.class);
+
     private final CouchDbClientWrapper couchClient;
 
     public CouchDbProducer(CouchDbEndpoint endpoint, CouchDbClientWrapper couchClient) {
@@ -45,8 +49,8 @@ public class CouchDbProducer extends DefaultProducer {
                 throw new CouchDbException("Could not save document [unknown reason]", exchange);
             }
 
-            if (log.isTraceEnabled()) {
-                log.trace("Document saved [_id={}, _rev={}]", save.getId(), save.getRev());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Document saved [_id={}, _rev={}]", save.getId(), save.getRev());
             }
             exchange.getIn().setHeader(CouchDbConstants.HEADER_DOC_REV, save.getRev());
             exchange.getIn().setHeader(CouchDbConstants.HEADER_DOC_ID, save.getId());
@@ -57,8 +61,8 @@ public class CouchDbProducer extends DefaultProducer {
                     throw new CouchDbException("Could not delete document [unknown reason]", exchange);
                 }
 
-                if (log.isTraceEnabled()) {
-                    log.trace("Document saved [_id={}, _rev={}]", delete.getId(), delete.getRev());
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Document saved [_id={}, _rev={}]", delete.getId(), delete.getRev());
                 }
                 exchange.getIn().setHeader(CouchDbConstants.HEADER_DOC_REV, delete.getRev());
                 exchange.getIn().setHeader(CouchDbConstants.HEADER_DOC_ID, delete.getId());
@@ -70,8 +74,8 @@ public class CouchDbProducer extends DefaultProducer {
                 }
                 Object response = getElement(docId);
 
-                if (log.isTraceEnabled()) {
-                    log.trace("Document retrieved [_id={}]", docId);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Document retrieved [_id={}]", docId);
                 }
                 
                 exchange.getIn().setBody(response);
diff --git a/components/camel-crypto-cms/src/main/java/org/apache/camel/component/crypto/cms/CryptoCmsComponent.java b/components/camel-crypto-cms/src/main/java/org/apache/camel/component/crypto/cms/CryptoCmsComponent.java
index 20ebcd9..b28efb9 100644
--- a/components/camel-crypto-cms/src/main/java/org/apache/camel/component/crypto/cms/CryptoCmsComponent.java
+++ b/components/camel-crypto-cms/src/main/java/org/apache/camel/component/crypto/cms/CryptoCmsComponent.java
@@ -39,10 +39,14 @@ import org.apache.camel.spi.annotations.Component;
 import org.apache.camel.support.DefaultComponent;
 import org.apache.camel.util.ObjectHelper;
 import org.bouncycastle.jce.provider.BouncyCastleProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Component("crypto-cms")
 public class CryptoCmsComponent extends DefaultComponent {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CryptoCmsComponent.class);
+
     @Metadata(label = "advanced")
     private SignedDataVerifierConfiguration signedDataVerifierConfiguration;
 
@@ -108,7 +112,7 @@ public class CryptoCmsComponent extends DefaultComponent {
             processor = new EnvelopedDataDecryptor(config);
         } else {
             String error = "Endpoint uri " + uri + " is wrong configured. Operation " + scheme + " is not supported. Supported operations are: sign, verify, encrypt, decrypt";
-            log.error(error);
+            LOG.error(error);
             throw new IllegalStateException(error);
         }
         CryptoCmsEndpoint endpoint = new CryptoCmsEndpoint(uri, this, processor);
@@ -150,7 +154,7 @@ public class CryptoCmsComponent extends DefaultComponent {
     @Override
     protected void doStart() throws Exception { // NOPMD
         if (Security.getProvider(BouncyCastleProvider.PROVIDER_NAME) == null) {
-            log.debug("Adding BouncyCastleProvider as security provider");
+            LOG.debug("Adding BouncyCastleProvider as security provider");
             Security.addProvider(new BouncyCastleProvider());
         }
         super.doStart();
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfBlueprintEndpoint.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfBlueprintEndpoint.java
index 95a83db..96be055 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfBlueprintEndpoint.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfBlueprintEndpoint.java
@@ -21,9 +21,13 @@ import org.apache.camel.util.ObjectHelper;
 import org.apache.cxf.BusFactory;
 import org.osgi.framework.BundleContext;
 import org.osgi.service.blueprint.container.BlueprintContainer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CxfBlueprintEndpoint extends CxfEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CxfBlueprintEndpoint.class);
+
     private BlueprintContainer blueprintContainer;
     private BundleContext bundleContext;
     private BlueprintCamelContext blueprintCamelContext;
@@ -53,7 +57,7 @@ public class CxfBlueprintEndpoint extends CxfEndpoint {
     @Override
     protected void checkName(Object value, String name) {
         if (ObjectHelper.isEmpty(value)) {
-            log.warn("The " + name + " of " + this.getEndpointUri() + " is empty, cxf will try to load the first one in wsdl for you.");
+            LOG.warn("The " + name + " of " + this.getEndpointUri() + " is empty, cxf will try to load the first one in wsdl for you.");
         }
     }
 
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
index bb37fd7..b1066d9 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
@@ -27,6 +27,8 @@ import org.apache.camel.spi.annotations.Component;
 import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.HeaderFilterStrategyComponent;
 import org.apache.camel.util.PropertiesHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Defines the <a href="http://camel.apache.org/cxf.html">CXF Component</a>
@@ -34,6 +36,8 @@ import org.apache.camel.util.PropertiesHelper;
 @Component("cxf")
 public class CxfComponent extends HeaderFilterStrategyComponent implements SSLContextParametersAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CxfComponent.class);
+
     @Metadata(label = "advanced")
     private Boolean allowStreaming;
     @Metadata(label = "security", defaultValue = "false")
@@ -83,7 +87,7 @@ public class CxfComponent extends HeaderFilterStrategyComponent implements SSLCo
 
         Object value = parameters.remove("setDefaultBus");
         if (value != null) {
-            log.warn("The option setDefaultBus is @deprecated, use name defaultBus instead");
+            LOG.warn("The option setDefaultBus is @deprecated, use name defaultBus instead");
             if (!parameters.containsKey("defaultBus")) {
                 parameters.put("defaultBus", value);
             }
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfConsumer.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfConsumer.java
index 7f16c41..7e28829 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfConsumer.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfConsumer.java
@@ -22,6 +22,8 @@ import java.util.Map;
 
 import javax.xml.ws.WebFault;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Element;
 
 import org.apache.camel.AsyncCallback;
@@ -57,6 +59,8 @@ import org.apache.cxf.ws.addressing.EndpointReferenceType;
  */
 public class CxfConsumer extends DefaultConsumer implements Suspendable {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CxfConsumer.class);
+
     private Server server;
     private CxfEndpoint cxfEndpoint;
 
@@ -150,14 +154,14 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
         // we receive a CXF request when this method is called
         @Override
         public Object invoke(Exchange cxfExchange, Object o) {
-            log.trace("Received CXF Request: {}", cxfExchange);
+            LOG.trace("Received CXF Request: {}", cxfExchange);
             Continuation continuation;
             if (!endpoint.isSynchronous() && isAsyncInvocationSupported(cxfExchange)
                 && (continuation = getContinuation(cxfExchange)) != null) {
-                log.trace("Calling the Camel async processors.");
+                LOG.trace("Calling the Camel async processors.");
                 return asyncInvoke(cxfExchange, continuation);
             } else {
-                log.trace("Calling the Camel sync processors.");
+                LOG.trace("Calling the Camel sync processors.");
                 return syncInvoke(cxfExchange);
             }
         }
@@ -165,13 +169,13 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
         // NOTE this code cannot work with CXF 2.2.x and JMSContinuation
         // as it doesn't break out the interceptor chain when we call it
         private Object asyncInvoke(Exchange cxfExchange, final Continuation continuation) {
-            log.trace("asyncInvoke continuation: {}", continuation);
+            LOG.trace("asyncInvoke continuation: {}", continuation);
             synchronized (continuation) {
                 if (continuation.isNew()) {
                     final org.apache.camel.Exchange camelExchange = prepareCamelExchange(cxfExchange);
 
                     // Now we don't set up the timeout value
-                    log.trace("Suspending continuation of exchangeId: {}", camelExchange.getExchangeId());
+                    LOG.trace("Suspending continuation of exchangeId: {}", camelExchange.getExchangeId());
 
                     // The continuation could be called before the suspend is called
                     continuation.suspend(cxfEndpoint.getContinuationTimeout());
@@ -183,7 +187,7 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
                         public void done(boolean doneSync) {
                             // make sure the continuation resume will not be called before the suspend method in other thread
                             synchronized (continuation) {
-                                log.trace("Resuming continuation of exchangeId: {}", camelExchange.getExchangeId());
+                                LOG.trace("Resuming continuation of exchangeId: {}", camelExchange.getExchangeId());
                                 // resume processing after both, sync and async callbacks
                                 continuation.resume();
                             }
@@ -232,14 +236,14 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
             org.apache.camel.Exchange camelExchange = prepareCamelExchange(cxfExchange);
             try {
                 try {
-                    log.trace("Processing +++ START +++");
+                    LOG.trace("Processing +++ START +++");
                     // send Camel exchange to the target processor
                     getProcessor().process(camelExchange);
                 } catch (Exception e) {
                     throw new Fault(e);
                 }
 
-                log.trace("Processing +++ END +++");
+                LOG.trace("Processing +++ END +++");
                 setResponseBack(cxfExchange, camelExchange);
             }  catch (Exception ex) {
                 doneUoW(camelExchange);
@@ -270,7 +274,7 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
 
             if (boi != null) {
                 camelExchange.setProperty(BindingOperationInfo.class.getName(), boi);
-                log.trace("Set exchange property: BindingOperationInfo: {}", boi);
+                LOG.trace("Set exchange property: BindingOperationInfo: {}", boi);
                 // set the message exchange patter with the boi
                 if (boi.getOperationInfo().isOneWay()) {
                     camelExchange.setPattern(ExchangePattern.InOnly);
@@ -284,7 +288,7 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
 
             // set data format mode in Camel exchange
             camelExchange.setProperty(CxfConstants.DATA_FORMAT_PROPERTY, dataFormat);
-            log.trace("Set Exchange property: {}={}", DataFormat.class.getName(), dataFormat);
+            LOG.trace("Set Exchange property: {}={}", DataFormat.class.getName(), dataFormat);
 
             camelExchange.setProperty(Message.MTOM_ENABLED, String.valueOf(endpoint.isMtomEnabled()));
 
@@ -303,7 +307,7 @@ public class CxfConsumer extends DefaultConsumer implements Suspendable {
             try {
                 CxfConsumer.this.createUoW(camelExchange);
             } catch (Exception e) {
-                log.error("Error processing request", e);
+                LOG.error("Error processing request", e);
                 throw new Fault(e);
             }
             return camelExchange;
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
index 2f164b8..66b67ca 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
@@ -43,6 +43,8 @@ import javax.xml.ws.Provider;
 import javax.xml.ws.WebServiceProvider;
 import javax.xml.ws.handler.Handler;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 
@@ -120,6 +122,8 @@ import org.apache.cxf.wsdl.WSDLManager;
 @UriEndpoint(firstVersion = "1.0.0", scheme = "cxf", title = "CXF", syntax = "cxf:beanId:address", label = "soap,webservice")
 public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, HeaderFilterStrategyAware, Service, Cloneable {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CxfEndpoint.class);
+
     @UriParam(label = "advanced")
     protected Bus bus;
 
@@ -317,7 +321,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
                 sfb.getFeatures().add(feature);
             }
         } else {
-            log.debug("Ignore DataFormat mode {} since SEI class is annotated with WebServiceProvider", getDataFormat());
+            LOG.debug("Ignore DataFormat mode {} since SEI class is annotated with WebServiceProvider", getDataFormat());
         }
 
         if (isLoggingFeatureEnabled()) {
@@ -345,7 +349,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
             } else {
                 sfb.setProperties(getProperties());
             }
-            log.debug("ServerFactoryBean: {} added properties: {}", sfb, getProperties());
+            LOG.debug("ServerFactoryBean: {} added properties: {}", sfb, getProperties());
         }
         if (this.isSkipPayloadMessagePartCheck()) {
             if (sfb.getProperties() == null) {
@@ -531,7 +535,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
             } else {
                 factoryBean.setProperties(getProperties());
             }
-            log.debug("ClientFactoryBean: {} added properties: {}", factoryBean, getProperties());
+            LOG.debug("ClientFactoryBean: {} added properties: {}", factoryBean, getProperties());
         }
 
         // setup the basic authentication property
@@ -635,7 +639,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
 
     void checkName(Object value, String name) {
         if (ObjectHelper.isEmpty(value)) {
-            log.warn("The " + name + " of " + this.getEndpointUri() + " is empty, cxf will try to load the first one in wsdl for you.");
+            LOG.warn("The " + name + " of " + this.getEndpointUri() + " is empty, cxf will try to load the first one in wsdl for you.");
         }
     }
 
@@ -935,12 +939,12 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
         if (bus == null) {
             bus = CxfEndpointUtils.createBus(getCamelContext());
             this.createBus = true;
-            log.debug("Using DefaultBus {}", bus);
+            LOG.debug("Using DefaultBus {}", bus);
         }
 
         if (!getBusHasBeenCalled.getAndSet(true) && defaultBus) {
             BusFactory.setDefaultBus(bus);
-            log.debug("Set bus {} as thread default bus", bus);
+            LOG.debug("Set bus {} as thread default bus", bus);
         }
         return bus;
     }
@@ -1006,7 +1010,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
                                              this.properties);
             } catch (Throwable e) {
                 // TODO: Why dont't we rethrown this exception
-                log.warn("Error setting CamelContext. This exception will be ignored.", e);
+                LOG.warn("Error setting CamelContext. This exception will be ignored.", e);
             }
         }
     }
@@ -1028,7 +1032,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
                                              this.properties);
             } catch (Throwable e) {
                 // TODO: Why dont't we rethrown this exception
-                log.warn("Error setting properties. This exception will be ignored.", e);
+                LOG.warn("Error setting properties. This exception will be ignored.", e);
             }
         }
     }
@@ -1061,7 +1065,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
     protected void doStop() throws Exception {
         // we should consider to shutdown the bus if the bus is created by cxfEndpoint
         if (createBus && bus != null) {
-            log.info("shutdown the bus ... {}", bus);
+            LOG.info("shutdown the bus ... {}", bus);
             getBus().shutdown(false);
             // clean up the bus to create a new one if the endpoint is started again
             bus = null;
@@ -1241,7 +1245,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
                     }
                 } catch (XMLStreamException e) {
                     //ignore
-                    log.warn("Error finding the start element.", e);
+                    LOG.warn("Error finding the start element.", e);
                     return null;
                 }
                 return r.getLocalName();
@@ -1436,7 +1440,7 @@ public class CxfEndpoint extends DefaultEndpoint implements AsyncEndpoint, Heade
         try {
             return new URI(uriString);
         } catch (URISyntaxException e) {
-            log.error("cannot determine request URI", e);
+            LOG.error("cannot determine request URI", e);
             return null;
         }
     }
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfProducer.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfProducer.java
index 2c350a8..36e1f79 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfProducer.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfProducer.java
@@ -45,6 +45,8 @@ import org.apache.cxf.message.ExchangeImpl;
 import org.apache.cxf.message.Message;
 import org.apache.cxf.service.model.BindingMessageInfo;
 import org.apache.cxf.service.model.BindingOperationInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * CxfProducer binds a Camel exchange to a CXF exchange, acts as a CXF 
@@ -53,6 +55,8 @@ import org.apache.cxf.service.model.BindingOperationInfo;
  */
 public class CxfProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(CxfProducer.class);
+
     private Client client;
     private CxfEndpoint endpoint;
 
@@ -96,7 +100,7 @@ public class CxfProducer extends DefaultAsyncProducer {
     // so we don't delegate the sync process call to the async process 
     @Override
     public boolean process(Exchange camelExchange, AsyncCallback callback) {
-        log.trace("Process exchange: {} in an async way.", camelExchange);
+        LOG.trace("Process exchange: {} in an async way.", camelExchange);
         
         try {
             // create CXF exchange
@@ -135,7 +139,7 @@ public class CxfProducer extends DefaultAsyncProducer {
      */
     @Override
     public void process(Exchange camelExchange) throws Exception {
-        log.trace("Process exchange: {} in sync way.", camelExchange);
+        LOG.trace("Process exchange: {} in sync way.", camelExchange);
         
         // create CXF exchange
         ExchangeImpl cxfExchange = new ExchangeImpl();
@@ -167,7 +171,7 @@ public class CxfProducer extends DefaultAsyncProducer {
                         endpoint.getCookieHandler().storeCookies(camelExchange, endpoint.getRequestUri(camelExchange), cxfHeaders);
                     }
                 } catch (IOException e) {
-                    log.error("Cannot store cookies", e);
+                    LOG.error("Cannot store cookies", e);
                 }
             }
             // bind the CXF response to Camel exchange
@@ -188,7 +192,7 @@ public class CxfProducer extends DefaultAsyncProducer {
         // set data format mode in exchange
         DataFormat dataFormat = endpoint.getDataFormat();
         camelExchange.setProperty(CxfConstants.DATA_FORMAT_PROPERTY, dataFormat);   
-        log.trace("Set Camel Exchange property: {}={}", DataFormat.class.getName(), dataFormat);
+        LOG.trace("Set Camel Exchange property: {}={}", DataFormat.class.getName(), dataFormat);
         
         if (endpoint.isMergeProtocolHeaders()) {
             camelExchange.setProperty(CxfConstants.CAMEL_CXF_PROTOCOL_HEADERS_MERGED, Boolean.TRUE);
@@ -200,7 +204,7 @@ public class CxfProducer extends DefaultAsyncProducer {
         // don't let CXF ClientImpl close the input stream 
         if (dataFormat.dealias() == DataFormat.RAW) {
             cxfExchange.put(Client.KEEP_CONDUIT_ALIVE, true);
-            log.trace("Set CXF Exchange property: {}={}", Client.KEEP_CONDUIT_ALIVE, true);
+            LOG.trace("Set CXF Exchange property: {}={}", Client.KEEP_CONDUIT_ALIVE, true);
         }
      
         // bind the request CXF exchange
@@ -223,7 +227,7 @@ public class CxfProducer extends DefaultAsyncProducer {
                     requestContext.put(Message.PROTOCOL_HEADERS, transportHeaders);
                 }
             } catch (IOException e) {
-                log.warn("Cannot load cookies", e);
+                LOG.warn("Cannot load cookies", e);
             }
         }
 
@@ -249,13 +253,13 @@ public class CxfProducer extends DefaultAsyncProducer {
         
         // store the original boi in the exchange
         camelExchange.setProperty(BindingOperationInfo.class.getName(), boi);
-        log.trace("Set exchange property: BindingOperationInfo: {}", boi);
+        LOG.trace("Set exchange property: BindingOperationInfo: {}", boi);
 
         // Unwrap boi before passing it to make a client call
         if (endpoint.getDataFormat() != DataFormat.PAYLOAD && !endpoint.isWrapped() && boi != null) {
             if (boi.isUnwrappedCapable()) {
                 boi = boi.getUnwrappedOperation();
-                log.trace("Unwrapped BOI {}", boi);
+                LOG.trace("Unwrapped BOI {}", boi);
             }
         }
         return  boi;
@@ -352,10 +356,10 @@ public class CxfProducer extends DefaultAsyncProducer {
             params[0] = exchange.getIn().getBody();
         }
 
-        if (log.isTraceEnabled()) {
+        if (LOG.isTraceEnabled()) {
             if (params != null) {
                 for (int i = 0; i < params.length; i++) {
-                    log.trace("params[{}] = {}", i, params[i]);
+                    LOG.trace("params[{}] = {}", i, params[i]);
                 }
             }
         }
@@ -378,11 +382,11 @@ public class CxfProducer extends DefaultAsyncProducer {
         BindingOperationInfo answer = null;
         String lp = ex.getIn().getHeader(CxfConstants.OPERATION_NAME, String.class);
         if (lp == null) {
-            log.debug("CxfProducer cannot find the {} from message header, trying with defaultOperationName", CxfConstants.OPERATION_NAME);
+            LOG.debug("CxfProducer cannot find the {} from message header, trying with defaultOperationName", CxfConstants.OPERATION_NAME);
             lp = endpoint.getDefaultOperationName();
         }
         if (lp == null) {
-            log.debug("CxfProducer cannot find the {} from message header and there is no DefaultOperationName setting, CxfProducer will pick up the first available operation.",
+            LOG.debug("CxfProducer cannot find the {} from message header and there is no DefaultOperationName setting, CxfProducer will pick up the first available operation.",
                      CxfConstants.OPERATION_NAME);
             Collection<BindingOperationInfo> bois = 
                 client.getEndpoint().getEndpointInfo().getBinding().getOperations();
@@ -399,12 +403,12 @@ public class CxfProducer extends DefaultAsyncProducer {
             }
             if (ns == null) {
                 ns = client.getEndpoint().getService().getName().getNamespaceURI();
-                log.trace("Operation namespace not in header. Set it to: {}", ns);
+                LOG.trace("Operation namespace not in header. Set it to: {}", ns);
             }            
 
             QName qname = new QName(ns, lp);
 
-            log.trace("Operation qname = {}", qname);
+            LOG.trace("Operation qname = {}", qname);
             
             answer = client.getEndpoint().getEndpointInfo().getBinding().getOperation(qname);
             if (answer == null) {
diff --git a/components/camel-dataset/src/main/java/org/apache/camel/component/dataset/DataSetTestEndpoint.java b/components/camel-dataset/src/main/java/org/apache/camel/component/dataset/DataSetTestEndpoint.java
index 5851f81..b05c152 100644
--- a/components/camel-dataset/src/main/java/org/apache/camel/component/dataset/DataSetTestEndpoint.java
+++ b/components/camel-dataset/src/main/java/org/apache/camel/component/dataset/DataSetTestEndpoint.java
@@ -32,6 +32,8 @@ import org.apache.camel.spi.UriParam;
 import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.EndpointHelper;
 import org.apache.camel.support.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The dataset-test component extends the mock component by on startup to pull messages from another endpoint to set the expected message bodies.
@@ -45,6 +47,8 @@ import org.apache.camel.support.ObjectHelper;
 @UriEndpoint(firstVersion = "1.3.0", scheme = "dataset-test", title = "DataSet Test", syntax = "dataset-test:name", producerOnly = true, label = "core,testing", lenientProperties = true)
 public class DataSetTestEndpoint extends MockEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DataSetTestEndpoint.class);
+
     private Endpoint expectedMessageEndpoint;
 
     @UriPath(description = "Name of endpoint to lookup in the registry to use for polling messages used for testing") @Metadata(required = true)
@@ -68,7 +72,7 @@ public class DataSetTestEndpoint extends MockEndpoint {
 
     @Override
     protected void doStart() throws Exception {
-        log.debug("Consuming expected messages from: {}", expectedMessageEndpoint);
+        LOG.debug("Consuming expected messages from: {}", expectedMessageEndpoint);
 
         final List<Object> expectedBodies = new ArrayList<>();
         EndpointHelper.pollEndpoint(expectedMessageEndpoint, new Processor() {
@@ -83,7 +87,7 @@ public class DataSetTestEndpoint extends MockEndpoint {
                     Iterator<?> it = ObjectHelper.createIterator(body, delimiter, false, true);
                     while (it.hasNext()) {
                         Object line = it.next();
-                        log.trace("Received message body {}", line);
+                        LOG.trace("Received message body {}", line);
                         expectedBodies.add(line);
                     }
                 } else {
@@ -92,7 +96,7 @@ public class DataSetTestEndpoint extends MockEndpoint {
             }
         }, timeout);
 
-        log.info("Received: {} expected message(s) from: {}", expectedBodies.size(), expectedMessageEndpoint);
+        LOG.info("Received: {} expected message(s) from: {}", expectedBodies.size(), expectedMessageEndpoint);
         if (anyOrder) {
             expectedBodiesReceivedInAnyOrder(expectedBodies);
         } else {
diff --git a/components/camel-direct/src/main/java/org/apache/camel/component/direct/DirectProducer.java b/components/camel-direct/src/main/java/org/apache/camel/component/direct/DirectProducer.java
index 68f6775..a9f4033 100644
--- a/components/camel-direct/src/main/java/org/apache/camel/component/direct/DirectProducer.java
+++ b/components/camel-direct/src/main/java/org/apache/camel/component/direct/DirectProducer.java
@@ -19,12 +19,16 @@ package org.apache.camel.component.direct;
 import org.apache.camel.AsyncCallback;
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultAsyncProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The direct producer.
  */
 public class DirectProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DirectProducer.class);
+
     private final DirectEndpoint endpoint;
 
     public DirectProducer(DirectEndpoint endpoint) {
@@ -39,7 +43,7 @@ public class DirectProducer extends DefaultAsyncProducer {
             if (endpoint.isFailIfNoConsumers()) {
                 throw new DirectConsumerNotAvailableException("No consumers available on endpoint: " + endpoint, exchange);
             } else {
-                log.debug("message ignored, no consumers available on endpoint: {}", endpoint);
+                LOG.debug("message ignored, no consumers available on endpoint: {}", endpoint);
             }
         } else {
             consumer.getProcessor().process(exchange);
@@ -54,7 +58,7 @@ public class DirectProducer extends DefaultAsyncProducer {
                 if (endpoint.isFailIfNoConsumers()) {
                     exchange.setException(new DirectConsumerNotAvailableException("No consumers available on endpoint: " + endpoint, exchange));
                 } else {
-                    log.debug("message ignored, no consumers available on endpoint: {}", endpoint);
+                    LOG.debug("message ignored, no consumers available on endpoint: {}", endpoint);
                 }
                 callback.done(true);
                 return true;
diff --git a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmBlockingProducer.java b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmBlockingProducer.java
index fe05b41..ff1cf26 100644
--- a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmBlockingProducer.java
+++ b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmBlockingProducer.java
@@ -20,6 +20,8 @@ import org.apache.camel.AsyncCallback;
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultAsyncProducer;
 import org.apache.camel.util.StopWatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The direct producer.
@@ -37,6 +39,8 @@ import org.apache.camel.util.StopWatch;
  */
 public class DirectVmBlockingProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DirectVmBlockingProducer.class);
+
     private final DirectVmEndpoint endpoint;
 
     public DirectVmBlockingProducer(DirectVmEndpoint endpoint) {
@@ -85,8 +89,8 @@ public class DirectVmBlockingProducer extends DefaultAsyncProducer {
         while (!done) {
             // sleep a bit to give chance for the consumer to be ready
             Thread.sleep(500);
-            if (log.isDebugEnabled()) {
-                log.debug("Waited {} for consumer to be ready", watch.taken());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Waited {} for consumer to be ready", watch.taken());
             }
 
             answer = endpoint.getConsumer();
diff --git a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProcessor.java b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProcessor.java
index 266d111..25654ae 100644
--- a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProcessor.java
+++ b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProcessor.java
@@ -22,12 +22,12 @@ import org.apache.camel.ExtendedExchange;
 import org.apache.camel.Processor;
 import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.support.processor.DelegateAsyncProcessor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-/**
-*
-*/
 public final class DirectVmProcessor extends DelegateAsyncProcessor {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DirectVmProcessor.class);
     private final DirectVmEndpoint endpoint;
 
     public DirectVmProcessor(Processor processor, DirectVmEndpoint endpoint) {
@@ -46,7 +46,7 @@ public final class DirectVmProcessor extends DelegateAsyncProcessor {
             // set TCCL to application context class loader if given
             ClassLoader appClassLoader = endpoint.getCamelContext().getApplicationContextClassLoader();
             if (appClassLoader != null) {
-                log.trace("Setting Thread ContextClassLoader to {}", appClassLoader);
+                LOG.trace("Setting Thread ContextClassLoader to {}", appClassLoader);
                 Thread.currentThread().setContextClassLoader(appClassLoader);
                 changed = true;
             }
@@ -58,7 +58,7 @@ public final class DirectVmProcessor extends DelegateAsyncProcessor {
                     try {
                         // restore TCCL if it was changed during processing
                         if (chgd) {
-                            log.trace("Restoring Thread ContextClassLoader to {}", current);
+                            LOG.trace("Restoring Thread ContextClassLoader to {}", current);
                             Thread.currentThread().setContextClassLoader(current);
                         }
                         // make sure to copy results back
@@ -72,7 +72,7 @@ public final class DirectVmProcessor extends DelegateAsyncProcessor {
         } finally {
             // restore TCCL if it was changed during processing
             if (changed) {
-                log.trace("Restoring Thread ContextClassLoader to {}", current);
+                LOG.trace("Restoring Thread ContextClassLoader to {}", current);
                 Thread.currentThread().setContextClassLoader(current);
             }
         }
diff --git a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProducer.java b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProducer.java
index 88fd51a..4ca9809 100644
--- a/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProducer.java
+++ b/components/camel-directvm/src/main/java/org/apache/camel/component/directvm/DirectVmProducer.java
@@ -21,12 +21,16 @@ import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.spi.HeaderFilterStrategy;
 import org.apache.camel.support.DefaultAsyncProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Direct-VM producer.
  */
 public class DirectVmProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DirectVmProducer.class);
+
     private DirectVmEndpoint endpoint;
 
     public DirectVmProducer(DirectVmEndpoint endpoint) {
@@ -43,7 +47,7 @@ public class DirectVmProducer extends DefaultAsyncProducer {
             if (endpoint.isFailIfNoConsumers()) {
                 exchange.setException(new DirectVmConsumerNotAvailableException("No consumers available on endpoint: " + endpoint, exchange));
             } else {
-                log.debug("message ignored, no consumers available on endpoint: {}", endpoint);
+                LOG.debug("message ignored, no consumers available on endpoint: {}", endpoint);
             }
             callback.done(true);
             return true;
diff --git a/components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/DisruptorProducer.java b/components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/DisruptorProducer.java
index 88ae473..4514dc9 100644
--- a/components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/DisruptorProducer.java
+++ b/components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/DisruptorProducer.java
@@ -28,12 +28,16 @@ import org.apache.camel.WaitForTaskToComplete;
 import org.apache.camel.support.DefaultAsyncProducer;
 import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.support.SynchronizationAdapter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A Producer for the Disruptor component.
  */
 public class DisruptorProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DisruptorProducer.class);
+
     private final WaitForTaskToComplete waitForTaskToComplete;
     private final long timeout;
 
@@ -87,13 +91,13 @@ public class DisruptorProducer extends DefaultAsyncProducer {
                 public void onDone(final Exchange response) {
                     // check for timeout, which then already would have invoked the latch
                     if (latch.getCount() == 0) {
-                        if (log.isTraceEnabled()) {
-                            log.trace("{}. Timeout occurred so response will be ignored: {}", this,
+                        if (LOG.isTraceEnabled()) {
+                            LOG.trace("{}. Timeout occurred so response will be ignored: {}", this,
                                     response.getMessage());
                         }
                     } else {
-                        if (log.isTraceEnabled()) {
-                            log.trace("{} with response: {}", this,
+                        if (LOG.isTraceEnabled()) {
+                            LOG.trace("{} with response: {}", this,
                                     response.getMessage());
                         }
                         try {
@@ -121,8 +125,8 @@ public class DisruptorProducer extends DefaultAsyncProducer {
             doPublish(copy);
 
             if (timeout > 0) {
-                if (log.isTraceEnabled()) {
-                    log.trace("Waiting for task to complete using timeout (ms): {} at [{}]", timeout,
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Waiting for task to complete using timeout (ms): {} at [{}]", timeout,
                             endpoint.getEndpointUri());
                 }
                 // lets see if we can get the task done before the timeout
@@ -150,8 +154,8 @@ public class DisruptorProducer extends DefaultAsyncProducer {
                     latch.countDown();
                 }
             } else {
-                if (log.isTraceEnabled()) {
-                    log.trace("Waiting for task to complete (blocking) at [{}]", endpoint.getEndpointUri());
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Waiting for task to complete (blocking) at [{}]", endpoint.getEndpointUri());
                 }
                 // no timeout then wait until its done
                 try {
@@ -174,7 +178,7 @@ public class DisruptorProducer extends DefaultAsyncProducer {
     }
 
     private void doPublish(Exchange exchange) {
-        log.trace("Publishing Exchange to disruptor ringbuffer: {}", exchange);
+        LOG.trace("Publishing Exchange to disruptor ringbuffer: {}", exchange);
 
         try {
             if (blockWhenFull) {
diff --git a/components/camel-docker/src/main/java/org/apache/camel/component/docker/consumer/DockerEventsConsumer.java b/components/camel-docker/src/main/java/org/apache/camel/component/docker/consumer/DockerEventsConsumer.java
index 9fed31a..ead73ae 100644
--- a/components/camel-docker/src/main/java/org/apache/camel/component/docker/consumer/DockerEventsConsumer.java
+++ b/components/camel-docker/src/main/java/org/apache/camel/component/docker/consumer/DockerEventsConsumer.java
@@ -29,9 +29,13 @@ import org.apache.camel.component.docker.DockerConstants;
 import org.apache.camel.component.docker.DockerEndpoint;
 import org.apache.camel.component.docker.DockerHelper;
 import org.apache.camel.support.DefaultConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class DockerEventsConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DockerEventsConsumer.class);
+
     private DockerEndpoint endpoint;
     private DockerComponent component;
     private EventsCmd eventsCmd;
@@ -80,18 +84,18 @@ public class DockerEventsConsumer extends DefaultConsumer {
 
         @Override
         public void onNext(Event event) {
-            log.debug("Received Docker Event: {}", event);
+            LOG.debug("Received Docker Event: {}", event);
 
             final Exchange exchange = getEndpoint().createExchange();
             Message message = exchange.getIn();
             message.setBody(event);
 
             try {
-                log.trace("Processing exchange [{}]...", exchange);
+                LOG.trace("Processing exchange [{}]...", exchange);
                 getAsyncProcessor().process(exchange, new AsyncCallback() {
                     @Override
                     public void done(boolean doneSync) {
-                        log.trace("Done processing exchange [{}]...", exchange);
+                        LOG.trace("Done processing exchange [{}]...", exchange);
                     }
                 });
             } catch (Exception e) {
diff --git a/components/camel-docker/src/main/java/org/apache/camel/component/docker/producer/AsyncDockerProducer.java b/components/camel-docker/src/main/java/org/apache/camel/component/docker/producer/AsyncDockerProducer.java
index 2f0ede5..f7a0de5 100644
--- a/components/camel-docker/src/main/java/org/apache/camel/component/docker/producer/AsyncDockerProducer.java
+++ b/components/camel-docker/src/main/java/org/apache/camel/component/docker/producer/AsyncDockerProducer.java
@@ -61,7 +61,7 @@ import org.slf4j.LoggerFactory;
  * The Docker producer.
  */
 public class AsyncDockerProducer extends DefaultAsyncProducer {
-    private static final Logger LOGGER = LoggerFactory.getLogger(AsyncDockerProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AsyncDockerProducer.class);
     private DockerConfiguration configuration;
     private DockerComponent component;
 
@@ -80,7 +80,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
             DockerOperation operation = configuration.getOperation();
 
-            Object result = null;
+            Object result;
 
             switch (operation) {
 
@@ -90,7 +90,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executeBuildImageRequest(client, message).exec(new BuildImageResultCallback() {
                     @Override
                     public void onNext(BuildResponseItem item) {
-                        log.trace("build image callback {}", item);
+                        LOG.trace("build image callback {}", item);
                         super.onNext(item);
                     }
                 });
@@ -108,7 +108,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executePullImageRequest(client, message).exec(new PullImageResultCallback() {
                     @Override
                     public void onNext(PullResponseItem item) {
-                        log.trace("pull image callback {}", item);
+                        LOG.trace("pull image callback {}", item);
                         super.onNext(item);
                     }
                 });
@@ -124,7 +124,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executePushImageRequest(client, message).exec(new PushImageResultCallback() {
                     @Override
                     public void onNext(PushResponseItem item) {
-                        log.trace("push image callback {}", item);
+                        LOG.trace("push image callback {}", item);
                         super.onNext(item);
                     }
                 });
@@ -141,7 +141,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executeAttachContainerRequest(client, message).exec(new AttachContainerResultCallback() {
                     @Override
                     public void onNext(Frame item) {
-                        log.trace("attach container callback {}", item);
+                        LOG.trace("attach container callback {}", item);
                         super.onNext(item);
                     }
 
@@ -158,7 +158,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executeLogContainerRequest(client, message).exec(new LogContainerResultCallback() {
                     @Override
                     public void onNext(Frame item) {
-                        log.trace("log container callback {}", item);
+                        LOG.trace("log container callback {}", item);
                         super.onNext(item);
                     }
 
@@ -176,7 +176,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executeWaitContainerRequest(client, message).exec(new WaitContainerResultCallback() {
                     @Override
                     public void onNext(WaitResponse item) {
-                        log.trace("wait contanier callback {}", item);
+                        LOG.trace("wait container callback {}", item);
                         super.onNext(item);
                     }
 
@@ -195,7 +195,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 result = executeExecStartRequest(client, message).exec(new ExecStartResultCallback() {
                     @Override
                     public void onNext(Frame item) {
-                        log.trace("exec start callback {}", item);
+                        LOG.trace("exec start callback {}", item);
                         super.onNext(item);
                     }
 
@@ -217,7 +217,7 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
                 exchange.getIn().setBody(result);
             }
         } catch (DockerException | InterruptedException | IOException e) {
-            log.error(e.getMessage(), e);
+            LOG.error(e.getMessage(), e);
         }
 
         callback.done(false);
@@ -226,15 +226,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produces a build image request
-     *
-     * @param client
-     * @param message
-     * @return
-     * @throws DockerException
      */
     private BuildImageCmd executeBuildImageRequest(DockerClient client, Message message) throws DockerException {
 
-        LOGGER.debug("Executing Docker Build Image Request");
+        LOG.debug("Executing Docker Build Image Request");
 
         Object body = message.getBody();
 
@@ -278,14 +273,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produces a pull image request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private PullImageCmd executePullImageRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Pull Image Request");
+        LOG.debug("Executing Docker Pull Image Request");
 
         String repository = DockerHelper.getProperty(DockerConstants.DOCKER_REPOSITORY, configuration, message, String.class);
 
@@ -315,14 +306,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produces a push image request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private PushImageCmd executePushImageRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Push Image Request");
+        LOG.debug("Executing Docker Push Image Request");
 
         String name = DockerHelper.getProperty(DockerConstants.DOCKER_NAME, configuration, message, String.class);
 
@@ -348,14 +335,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produce a attach container request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private AttachContainerCmd executeAttachContainerRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Attach Container Request");
+        LOG.debug("Executing Docker Attach Container Request");
 
         String containerId = DockerHelper.getProperty(DockerConstants.DOCKER_CONTAINER_ID, configuration, message, String.class);
 
@@ -399,14 +382,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produce a log container request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private LogContainerCmd executeLogContainerRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Log Container Request");
+        LOG.debug("Executing Docker Log Container Request");
 
         String containerId = DockerHelper.getProperty(DockerConstants.DOCKER_CONTAINER_ID, configuration, message, String.class);
 
@@ -456,14 +435,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produce a wait container request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private WaitContainerCmd executeWaitContainerRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Wait Container Request");
+        LOG.debug("Executing Docker Wait Container Request");
 
         String containerId = DockerHelper.getProperty(DockerConstants.DOCKER_CONTAINER_ID, configuration, message, String.class);
 
@@ -477,14 +452,10 @@ public class AsyncDockerProducer extends DefaultAsyncProducer {
 
     /**
      * Produces a exec start request
-     *
-     * @param client
-     * @param message
-     * @return
      */
     private ExecStartCmd executeExecStartRequest(DockerClient client, Message message) {
 
-        LOGGER.debug("Executing Docker Exec Start Request");
+        LOG.debug("Executing Docker Exec Start Request");
 
         String execId = DockerHelper.getProperty(DockerConstants.DOCKER_EXEC_ID, configuration, message, String.class);
 
diff --git a/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerEndpoint.java b/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerEndpoint.java
index 14d60cb..99dcb5a 100644
--- a/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerEndpoint.java
+++ b/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerEndpoint.java
@@ -33,6 +33,8 @@ import org.apache.camel.spi.UriEndpoint;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.DefaultEndpoint;
 import org.apache.camel.support.ResourceHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The dozer component provides the ability to map between Java beans using the Dozer mapping library.
@@ -40,6 +42,8 @@ import org.apache.camel.support.ResourceHelper;
 @UriEndpoint(firstVersion = "2.15.0", scheme = "dozer", title = "Dozer", syntax = "dozer:name", producerOnly = true, label = "transformation")
 public class DozerEndpoint extends DefaultEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DozerEndpoint.class);
+
     // IDs for built-in custom converters used with the Dozer component
     private static final String CUSTOM_MAPPING_ID = "_customMapping";
     private static final String VARIABLE_MAPPING_ID = "_variableMapping";
@@ -109,7 +113,7 @@ public class DozerEndpoint extends DefaultEndpoint {
     }
 
     protected void initDozerBeanContainerAndMapper() throws Exception {
-        log.info("Configuring {}...", Mapper.class.getName());
+        LOG.info("Configuring {}...", Mapper.class.getName());
 
         // Validate endpoint parameters
         if (configuration.getTargetModel() == null) {
diff --git a/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerProducer.java b/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerProducer.java
index f12291c..76d15c5 100644
--- a/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerProducer.java
+++ b/components/camel-dozer/src/main/java/org/apache/camel/component/dozer/DozerProducer.java
@@ -22,12 +22,16 @@ import org.apache.camel.spi.DataFormat;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.support.processor.MarshalProcessor;
 import org.apache.camel.support.processor.UnmarshalProcessor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Producer class for Dozer endpoints.
  */
 public class DozerProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DozerProducer.class);
+
     private DozerEndpoint endpoint;
     private UnmarshalProcessor unmarshaller;
     private MarshalProcessor marshaller;
@@ -46,7 +50,7 @@ public class DozerProducer extends DefaultProducer {
         // Unmarshal the source content only if an unmarshaller is configured.
         String unmarshalId = endpoint.getConfiguration().getUnmarshalId();
         if (unmarshalId != null) {
-            log.debug("Unmarshalling input data using data format '{}'.", unmarshalId);
+            LOG.debug("Unmarshalling input data using data format '{}'.", unmarshalId);
             resolveUnmarshaller(exchange, unmarshalId).process(exchange);
             if (exchange.getException() != null) {
                 throw exchange.getException();
@@ -63,7 +67,7 @@ public class DozerProducer extends DefaultProducer {
         // Convert to source model, if specified
         String sourceType = endpoint.getConfiguration().getSourceModel();
         if (sourceType != null) {
-            log.debug("Converting to source model {}.", sourceType);
+            LOG.debug("Converting to source model {}.", sourceType);
             Class<?> sourceModel = endpoint.getCamelContext()
                     .getClassResolver().resolveClass(sourceType);
             if (sourceModel == null) {
@@ -73,7 +77,7 @@ public class DozerProducer extends DefaultProducer {
         }
         
         // Perform mappings
-        log.debug("Mapping to target model {}.", targetModel.getName());
+        LOG.debug("Mapping to target model {}.", targetModel.getName());
         Object targetObject = endpoint.getMapper().map(msg.getBody(), targetModel);
         // Second pass to process literal mappings
         endpoint.getMapper().map(endpoint.getVariableMapper(), targetObject);
@@ -91,7 +95,7 @@ public class DozerProducer extends DefaultProducer {
         // Marshal the source content only if a marshaller is configured.
         String marshalId = endpoint.getConfiguration().getMarshalId();
         if (marshalId != null) {
-            log.debug("Marshalling output data using data format '{}'.", marshalId);
+            LOG.debug("Marshalling output data using data format '{}'.", marshalId);
             resolveMarshaller(exchange, marshalId).process(exchange);
             if (exchange.getException() != null) {
                 throw exchange.getException();
diff --git a/components/camel-drill/src/main/java/org/apache/camel/component/drill/DrillProducer.java b/components/camel-drill/src/main/java/org/apache/camel/component/drill/DrillProducer.java
index b408f6d..a662894 100644
--- a/components/camel-drill/src/main/java/org/apache/camel/component/drill/DrillProducer.java
+++ b/components/camel-drill/src/main/java/org/apache/camel/component/drill/DrillProducer.java
@@ -24,12 +24,16 @@ import java.sql.Statement;
 
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A drill producer
  */
 public class DrillProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DrillProducer.class);
+
     private DrillEndpoint endpoint;
 
     private Connection connection;
@@ -83,9 +87,7 @@ public class DrillProducer extends DefaultProducer {
     private void createJDBCConnection() throws ClassNotFoundException, SQLException {
         Class.forName(DrillConstants.DRILL_DRIVER);
 
-        // if(log.isDebugEnabled()) {
-        log.info("connection url: {}", endpoint.toJDBCUri());
-        // }
+        LOG.info("connection url: {}", endpoint.toJDBCUri());
 
         this.connection = DriverManager.getConnection(endpoint.toJDBCUri());
     }
diff --git a/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxDelProducer.java b/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxDelProducer.java
index 996e2ef..b340983 100644
--- a/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxDelProducer.java
+++ b/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxDelProducer.java
@@ -42,7 +42,6 @@ public class DropboxDelProducer extends DropboxProducer {
 
         exchange.getIn().setHeader(DropboxResultHeader.DELETED_PATH.name(), result.getEntry());
         exchange.getIn().setBody(result.getEntry());
-        log.debug("Deleted: {}", remotePath);
     }
 
 }
diff --git a/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxMoveProducer.java b/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxMoveProducer.java
index 5728f30..fcb890a 100644
--- a/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxMoveProducer.java
+++ b/components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/integration/producer/DropboxMoveProducer.java
@@ -43,8 +43,6 @@ public class DropboxMoveProducer extends DropboxProducer {
 
         exchange.getIn().setHeader(DropboxResultHeader.MOVED_PATH.name(), result.getOldPath());
         exchange.getIn().setBody(result.getNewPath());
-
-        log.debug("Moved from {} to {}", remotePath, newRemotePath);
     }
 
 }
diff --git a/components/camel-ehcache/src/main/java/org/apache/camel/component/ehcache/processor/aggregate/EhcacheAggregationRepository.java b/components/camel-ehcache/src/main/java/org/apache/camel/component/ehcache/processor/aggregate/EhcacheAggregationRepository.java
index ad64afd..81e9a47 100644
--- a/components/camel-ehcache/src/main/java/org/apache/camel/component/ehcache/processor/aggregate/EhcacheAggregationRepository.java
+++ b/components/camel-ehcache/src/main/java/org/apache/camel/component/ehcache/processor/aggregate/EhcacheAggregationRepository.java
@@ -30,9 +30,13 @@ import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
 import org.ehcache.Cache;
 import org.ehcache.CacheManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class EhcacheAggregationRepository extends ServiceSupport implements RecoverableAggregationRepository {
 
+    private static final Logger LOG = LoggerFactory.getLogger(EhcacheAggregationRepository.class);
+
     private CamelContext camelContext;
     private CacheManager cacheManager;
     private String cacheName;
@@ -143,7 +147,7 @@ public class EhcacheAggregationRepository extends ServiceSupport implements Reco
 
     @Override
     public Exchange add(final CamelContext camelContext, final String key, final Exchange exchange) {
-        log.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
+        LOG.trace("Adding an Exchange with ID {} for key {} in a thread-safe manner.", exchange.getExchangeId(), key);
 
         final DefaultExchangeHolder oldHolder = cache.get(key);
         final DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(exchange, true, allowSerializedHeaders);
@@ -160,13 +164,13 @@ public class EhcacheAggregationRepository extends ServiceSupport implements Reco
 
     @Override
     public void remove(CamelContext camelContext, String key, Exchange exchange) {
-        log.trace("Removing an exchange with ID {} for key {}", exchange.getExchangeId(), key);
+        LOG.trace("Removing an exchange with ID {} for key {}", exchange.getExchangeId(), key);
         cache.remove(key);
     }
 
     @Override
     public void confirm(CamelContext camelContext, String exchangeId) {
-        log.trace("Confirming an exchange with ID {}.", exchangeId);
+        LOG.trace("Confirming an exchange with ID {}.", exchangeId);
         cache.remove(exchangeId);
     }
 
@@ -180,15 +184,15 @@ public class EhcacheAggregationRepository extends ServiceSupport implements Reco
 
     @Override
     public Set<String> scan(CamelContext camelContext) {
-        log.trace("Scanning for exchanges to recover in {} context", camelContext.getName());
+        LOG.trace("Scanning for exchanges to recover in {} context", camelContext.getName());
         Set<String> scanned = Collections.unmodifiableSet(getKeys());
-        log.trace("Found {} keys for exchanges to recover in {} context", scanned.size(), camelContext.getName());
+        LOG.trace("Found {} keys for exchanges to recover in {} context", scanned.size(), camelContext.getName());
         return scanned;
     }
 
     @Override
     public Exchange recover(CamelContext camelContext, String exchangeId) {
-        log.trace("Recovering an Exchange with ID {}.", exchangeId);
+        LOG.trace("Recovering an Exchange with ID {}.", exchangeId);
         return useRecovery ? unmarshallExchange(camelContext, cache.get(exchangeId)) : null;
     }
 
diff --git a/components/camel-elasticsearch-rest/src/main/java/org/apache/camel/component/elasticsearch/ElasticsearchProducer.java b/components/camel-elasticsearch-rest/src/main/java/org/apache/camel/component/elasticsearch/ElasticsearchProducer.java
index 79dc42a..fd30e78 100644
--- a/components/camel-elasticsearch-rest/src/main/java/org/apache/camel/component/elasticsearch/ElasticsearchProducer.java
+++ b/components/camel-elasticsearch-rest/src/main/java/org/apache/camel/component/elasticsearch/ElasticsearchProducer.java
@@ -47,6 +47,9 @@ import org.elasticsearch.client.sniff.Sniffer;
 import org.elasticsearch.client.sniff.SnifferBuilder;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.elasticsearch.ElasticsearchConstants.PARAM_SCROLL;
 import static org.apache.camel.component.elasticsearch.ElasticsearchConstants.PARAM_SCROLL_KEEP_ALIVE_MS;
@@ -56,6 +59,8 @@ import static org.apache.camel.component.elasticsearch.ElasticsearchConstants.PA
  */
 public class ElasticsearchProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchProducer.class);
+
     protected final ElasticsearchConfiguration configuration;
     private RestClient client;
     private Sniffer sniffer;
@@ -270,12 +275,12 @@ public class ElasticsearchProducer extends DefaultProducer {
 
     private void startClient() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException, UnknownHostException {
         if (client == null) {
-            log.info("Connecting to the ElasticSearch cluster: {}", configuration.getClusterName());
+            LOG.info("Connecting to the ElasticSearch cluster: {}", configuration.getClusterName());
             if (configuration.getHostAddressesList() != null
                 && !configuration.getHostAddressesList().isEmpty()) {
                 client = createClient();
             } else {
-                log.warn("Incorrect ip address and port parameters settings for ElasticSearch cluster");
+                LOG.warn("Incorrect ip address and port parameters settings for ElasticSearch cluster");
             }
         }
     }
@@ -307,7 +312,7 @@ public class ElasticsearchProducer extends DefaultProducer {
     @Override
     protected void doStop() throws Exception {
         if (client != null) {
-            log.info("Disconnecting from ElasticSearch cluster: {}", configuration.getClusterName());
+            LOG.info("Disconnecting from ElasticSearch cluster: {}", configuration.getClusterName());
             client.close();
             if (sniffer != null) {
                 sniffer.close();
diff --git a/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlEndpoint.java b/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlEndpoint.java
index d04c809..8b24d4f 100644
--- a/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlEndpoint.java
+++ b/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlEndpoint.java
@@ -40,6 +40,8 @@ import org.apache.camel.spi.UriParam;
 import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.ObjectHelper;
 import org.apache.camel.support.ResourceHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
 import org.springframework.jdbc.core.namedparam.SqlParameterSource;
 
@@ -50,6 +52,8 @@ import org.springframework.jdbc.core.namedparam.SqlParameterSource;
         label = "database,sql")
 public class ElsqlEndpoint extends DefaultSqlEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ElsqlEndpoint.class);
+
     private ElSql elSql;
     private final NamedParameterJdbcTemplate namedJdbcTemplate;
 
@@ -82,7 +86,7 @@ public class ElsqlEndpoint extends DefaultSqlEndpoint {
         final Exchange dummy = createExchange();
         final SqlParameterSource param = new ElsqlSqlMapSource(dummy, null);
         final String sql = elSql.getSql(elsqlName, new SpringSqlParams(param));
-        log.debug("ElsqlConsumer @{} using sql: {}", elsqlName, sql);
+        LOG.debug("ElsqlConsumer @{} using sql: {}", elsqlName, sql);
 
         final ElsqlConsumer consumer = new ElsqlConsumer(this, processor, namedJdbcTemplate, sql, param, preStategy, proStrategy);
         consumer.setMaxMessagesPerPoll(getMaxMessagesPerPoll());
diff --git a/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlProducer.java b/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlProducer.java
index fdefbd7..bb37a50 100644
--- a/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlProducer.java
+++ b/components/camel-elsql/src/main/java/org/apache/camel/component/elsql/ElsqlProducer.java
@@ -35,6 +35,8 @@ import org.apache.camel.component.sql.SqlConstants;
 import org.apache.camel.component.sql.SqlOutputType;
 import org.apache.camel.component.sql.SqlPrepareStatementStrategy;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.dao.DataAccessException;
 import org.springframework.jdbc.core.PreparedStatementCallback;
 import org.springframework.jdbc.core.PreparedStatementCreator;
@@ -51,6 +53,8 @@ import static org.springframework.jdbc.support.JdbcUtils.closeStatement;
 
 public class ElsqlProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ElsqlProducer.class);
+
     private final ElSql elSql;
     private final String elSqlName;
     private final NamedParameterJdbcTemplate jdbcTemplate;
@@ -80,7 +84,7 @@ public class ElsqlProducer extends DefaultProducer {
 
         final SqlParameterSource param = new ElsqlSqlMapSource(exchange, data);
         final String sql = elSql.getSql(elSqlName, new SpringSqlParams(param));
-        log.debug("ElsqlProducer @{} using sql: {}", elSqlName, sql);
+        LOG.debug("ElsqlProducer @{} using sql: {}", elSqlName, sql);
 
         // special for processing stream list (batch not supported)
         final SqlOutputType outputType = getEndpoint().getOutputType();
@@ -89,7 +93,7 @@ public class ElsqlProducer extends DefaultProducer {
             return;
         }
 
-        log.trace("jdbcTemplate.execute: {}", sql);
+        LOG.trace("jdbcTemplate.execute: {}", sql);
         jdbcTemplate.execute(sql, param, new PreparedStatementCallback<Object>() {
             @Override
             public Object doInPreparedStatement(final PreparedStatement ps) throws SQLException, DataAccessException {
@@ -128,7 +132,7 @@ public class ElsqlProducer extends DefaultProducer {
                             exchange.getOut().getHeaders().putAll(exchange.getIn().getHeaders());
 
                             final SqlOutputType outputType = getEndpoint().getOutputType();
-                            log.trace("Got result list from query: {}, outputType={}", rs, outputType);
+                            LOG.trace("Got result list from query: {}, outputType={}", rs, outputType);
                             if (outputType == SqlOutputType.SelectList) {
                                 final List<?> data = getEndpoint().queryForList(rs, true);
                                 // for noop=true we still want to enrich with the row count header
@@ -194,7 +198,7 @@ public class ElsqlProducer extends DefaultProducer {
     }
 
     protected void processStreamList(final Exchange exchange, final PreparedStatementCreator statementCreator, final String preparedQuery) throws Exception {
-        log.trace("processStreamList: {}", preparedQuery);
+        LOG.trace("processStreamList: {}", preparedQuery);
 
         // do not use the jdbcTemplate as it will auto-close connection/ps/rs when exiting the execute method
         // and we need to keep the connection alive while routing and close it when the Exchange is done being routed
diff --git a/components/camel-eventadmin/src/main/java/org/apache/camel/component/eventadmin/EventAdminConsumer.java b/components/camel-eventadmin/src/main/java/org/apache/camel/component/eventadmin/EventAdminConsumer.java
index e21c490..50c34fc 100644
--- a/components/camel-eventadmin/src/main/java/org/apache/camel/component/eventadmin/EventAdminConsumer.java
+++ b/components/camel-eventadmin/src/main/java/org/apache/camel/component/eventadmin/EventAdminConsumer.java
@@ -26,9 +26,13 @@ import org.osgi.framework.ServiceRegistration;
 import org.osgi.service.event.Event;
 import org.osgi.service.event.EventConstants;
 import org.osgi.service.event.EventHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class EventAdminConsumer extends DefaultConsumer implements EventHandler {
 
+    private static final Logger LOG = LoggerFactory.getLogger(EventAdminConsumer.class);
+
     private final EventAdminEndpoint endpoint;
     private ServiceRegistration<?> registration;
 
@@ -43,7 +47,7 @@ public class EventAdminConsumer extends DefaultConsumer implements EventHandler
         // TODO: populate exchange headers
         exchange.getIn().setBody(event);
 
-        log.trace("EventAdmin {} is firing", endpoint.getTopic());
+        LOG.trace("EventAdmin {} is firing", endpoint.getTopic());
         try {
             getProcessor().process(exchange);
             // log exception if an exception occurred and was not handled
diff --git a/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookConsumer.java b/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookConsumer.java
index 123483e..54b41ea 100644
--- a/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookConsumer.java
+++ b/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookConsumer.java
@@ -41,6 +41,9 @@ import org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.MatchT
 import org.apache.camel.component.facebook.data.FacebookPropertiesHelper;
 import org.apache.camel.component.facebook.data.ReadingBuilder;
 import org.apache.camel.support.ScheduledPollConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.facebook.FacebookConstants.FACEBOOK_DATE_FORMAT;
 import static org.apache.camel.component.facebook.FacebookConstants.READING_PREFIX;
@@ -55,6 +58,8 @@ import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper
  */
 public class FacebookConsumer extends ScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FacebookConsumer.class);
+
     private static final String SINCE_PREFIX = "since=";
 
     private final FacebookEndpoint endpoint;
@@ -93,10 +98,10 @@ public class FacebookConsumer extends ScheduledPollConsumer {
                 } catch (UnsupportedEncodingException e) {
                     throw new RuntimeCamelException(String.format("Error decoding %s.since with value %s due to: %s", READING_PREFIX, strSince, e.getMessage()), e);
                 }
-                log.debug("Using supplied property {}since value {}", READING_PREFIX, this.sinceTime);
+                LOG.debug("Using supplied property {}since value {}", READING_PREFIX, this.sinceTime);
             }
             if (queryString.contains("until=")) {
-                log.debug("Overriding configured property {}until", READING_PREFIX);
+                LOG.debug("Overriding configured property {}until", READING_PREFIX);
             }
         }
         this.endpointProperties = Collections.unmodifiableMap(properties);
@@ -132,7 +137,7 @@ public class FacebookConsumer extends ScheduledPollConsumer {
             result = filteredMethods.get(0);
         } else {
             result = getHighestPriorityMethod(filteredMethods);
-            log.warn("Using highest priority method {} from methods {}", method, filteredMethods);
+            LOG.warn("Using highest priority method {} from methods {}", method, filteredMethods);
         }
         return result;
     }
diff --git a/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookEndpoint.java b/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookEndpoint.java
index 494d5d6..dbce597 100644
--- a/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookEndpoint.java
+++ b/components/camel-facebook/src/main/java/org/apache/camel/component/facebook/FacebookEndpoint.java
@@ -38,6 +38,9 @@ import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.DefaultEndpoint;
 import org.apache.camel.support.PropertyBindingSupport;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToGetMethod;
 import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToSearchMethod;
@@ -54,6 +57,8 @@ import static org.apache.camel.component.facebook.data.FacebookPropertiesHelper.
 @UriEndpoint(firstVersion = "2.14.0", scheme = "facebook", title = "Facebook", syntax = "facebook:methodName", label = "social")
 public class FacebookEndpoint extends DefaultEndpoint implements FacebookConstants {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FacebookEndpoint.class);
+
     private FacebookNameStyle nameStyle;
 
     @UriPath(name = "methodName", description = "What operation to perform") @Metadata(required = true)
@@ -158,10 +163,10 @@ public class FacebookEndpoint extends DefaultEndpoint implements FacebookConstan
         }
 
         // log missing/extra properties for debugging
-        if (log.isDebugEnabled()) {
+        if (LOG.isDebugEnabled()) {
             final Set<String> missing = getMissingProperties(method, nameStyle, arguments);
             if (!missing.isEmpty()) {
-                log.debug("Method {} could use one or more properties from {}", method, missing);
+                LOG.debug("Method {} could use one or more properties from {}", method, missing);
             }
         }
     }
diff --git a/components/camel-file-watch/src/main/java/org/apache/camel/component/file/watch/FileWatchConsumer.java b/components/camel-file-watch/src/main/java/org/apache/camel/component/file/watch/FileWatchConsumer.java
index 908e876..5f9d0be 100644
--- a/components/camel-file-watch/src/main/java/org/apache/camel/component/file/watch/FileWatchConsumer.java
+++ b/components/camel-file-watch/src/main/java/org/apache/camel/component/file/watch/FileWatchConsumer.java
@@ -36,12 +36,16 @@ import org.apache.camel.component.file.watch.utils.PathUtils;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.camel.util.AntPathMatcher;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The file-watch consumer.
  */
 public class FileWatchConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileWatchConsumer.class);
+
     private ExecutorService watchDirExecutorService;
     private ExecutorService pollExecutorService;
     private LinkedBlockingQueue<FileEvent> eventQueue;
@@ -79,7 +83,7 @@ public class FileWatchConsumer extends DefaultConsumer {
 
         DirectoryWatcher.Builder watcherBuilder = DirectoryWatcher.builder()
             .path(this.baseDirectory)
-            .logger(log)
+            .logger(LOG)
             .listener(new FileWatchDirectoryChangeListener());
 
         if (!System.getProperty("os.name").toLowerCase().contains("mac")) {
@@ -172,7 +176,7 @@ public class FileWatchConsumer extends DefaultConsumer {
                     return false;
                 }
             } catch (IOException e) {
-                log.warn(String.format("Exception occurred during executing filter. Filtering file %s out.", fileEvent.getEventPath()), e);
+                LOG.warn(String.format("Exception occurred during executing filter. Filtering file %s out.", fileEvent.getEventPath()), e);
                 return false;
             }
         }
@@ -197,7 +201,7 @@ public class FileWatchConsumer extends DefaultConsumer {
         @Override
         public void onEvent(DirectoryChangeEvent directoryChangeEvent) {
             if (directoryChangeEvent.eventType() == DirectoryChangeEvent.EventType.OVERFLOW) {
-                log.warn("OVERFLOW occurred, some events may be lost. Consider increasing of option 'pollThreads'");
+                LOG.warn("OVERFLOW occurred, some events may be lost. Consider increasing of option 'pollThreads'");
                 return;
             }
             FileEvent fileEvent = new FileEvent(directoryChangeEvent);
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/FileConsumer.java b/components/camel-file/src/main/java/org/apache/camel/component/file/FileConsumer.java
index 220ba4f..4c382bc 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/FileConsumer.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/FileConsumer.java
@@ -33,12 +33,15 @@ import org.apache.camel.Message;
 import org.apache.camel.Processor;
 import org.apache.camel.util.FileUtil;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * File consumer.
  */
 public class FileConsumer extends GenericFileConsumer<File> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileConsumer.class);
     private String endpointPath;
     private Set<String> extendedAttributes;
 
@@ -54,33 +57,33 @@ public class FileConsumer extends GenericFileConsumer<File> {
 
     @Override
     protected boolean pollDirectory(String fileName, List<GenericFile<File>> fileList, int depth) {
-        log.trace("pollDirectory from fileName: {}", fileName);
+        LOG.trace("pollDirectory from fileName: {}", fileName);
 
         depth++;
 
         File directory = new File(fileName);
         if (!directory.exists() || !directory.isDirectory()) {
-            log.debug("Cannot poll as directory does not exists or its not a directory: {}", directory);
+            LOG.debug("Cannot poll as directory does not exists or its not a directory: {}", directory);
             if (getEndpoint().isDirectoryMustExist()) {
                 throw new GenericFileOperationFailedException("Directory does not exist: " + directory);
             }
             return true;
         }
 
-        if (log.isTraceEnabled()) {
-            log.trace("Polling directory: {}, absolute path: {}", directory.getPath(), directory.getAbsolutePath());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Polling directory: {}, absolute path: {}", directory.getPath(), directory.getAbsolutePath());
         }
         File[] dirFiles = directory.listFiles();
         if (dirFiles == null || dirFiles.length == 0) {
             // no files in this directory to poll
-            if (log.isTraceEnabled()) {
-                log.trace("No files found in directory: {}", directory.getPath());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("No files found in directory: {}", directory.getPath());
             }
             return true;
         } else {
             // we found some files
-            if (log.isTraceEnabled()) {
-                log.trace("Found {} in directory: {}", dirFiles.length, directory.getPath());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Found {} in directory: {}", dirFiles.length, directory.getPath());
             }
         }
         List<File> files = Arrays.asList(dirFiles);
@@ -95,8 +98,8 @@ public class FileConsumer extends GenericFileConsumer<File> {
             }
 
             // trace log as Windows/Unix can have different views what the file is?
-            if (log.isTraceEnabled()) {
-                log.trace("Found file: {} [isAbsolute: {}, isDirectory: {}, isFile: {}, isHidden: {}]",
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Found file: {} [isAbsolute: {}, isDirectory: {}, isFile: {}, isHidden: {}]",
                         file, file.isAbsolute(), file.isDirectory(), file.isFile(), file.isHidden());
             }
 
@@ -115,7 +118,7 @@ public class FileConsumer extends GenericFileConsumer<File> {
             } else {
                 // Windows can report false to a file on a share so regard it always as a file (if its not a directory)
                 if (depth >= endpoint.minDepth && isValidFile(gf, false, files)) {
-                    log.trace("Adding valid file: {}", file);
+                    LOG.trace("Adding valid file: {}", file);
                     // matched file so add
                     if (extendedAttributes != null) {
                         Path path = file.toPath();
@@ -142,8 +145,8 @@ public class FileConsumer extends GenericFileConsumer<File> {
                                     allAttributes.put(attribute, Files.getAttribute(path, attribute));
                                 }
                             } catch (IOException e) {
-                                if (log.isDebugEnabled()) {
-                                    log.debug("Unable to read attribute {} on file {}", attribute, file, e);
+                                if (LOG.isDebugEnabled()) {
+                                    LOG.debug("Unable to read attribute {} on file {}", attribute, file, e);
                                 }
                             }
                         }
@@ -169,7 +172,7 @@ public class FileConsumer extends GenericFileConsumer<File> {
                 return true;
             }
         }
-        log.trace("Done file: {} does not exist", doneFileName);
+        LOG.trace("Done file: {} does not exist", doneFileName);
         return false;
     }
 
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/FileEndpoint.java b/components/camel-file/src/main/java/org/apache/camel/component/file/FileEndpoint.java
index 95a0ef1..322a6d3 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/FileEndpoint.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/FileEndpoint.java
@@ -38,6 +38,8 @@ import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.processor.idempotent.MemoryIdempotentRepository;
 import org.apache.camel.util.FileUtil;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The file component is used for reading or writing files.
@@ -45,6 +47,8 @@ import org.apache.camel.util.ObjectHelper;
 @UriEndpoint(firstVersion = "1.0.0", scheme = "file", title = "File", syntax = "file:directoryName", label = "core,file")
 public class FileEndpoint extends GenericFileEndpoint<File> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileEndpoint.class);
+
     private static final Integer CHMOD_WRITE_MASK = 02;
     private static final Integer CHMOD_READ_MASK = 04;
     private static final Integer CHMOD_EXECUTE_MASK = 01;
@@ -89,11 +93,11 @@ public class FileEndpoint extends GenericFileEndpoint<File> {
         // auto create starting directory if needed
         if (!file.exists() && !file.isDirectory()) {
             if (isAutoCreate()) {
-                log.debug("Creating non existing starting directory: {}", file);
+                LOG.debug("Creating non existing starting directory: {}", file);
                 boolean absolute = FileUtil.isAbsolute(file);
                 boolean created = operations.buildDirectory(file.getPath(), absolute);
                 if (!created) {
-                    log.warn("Cannot auto create starting directory: {}", file);
+                    LOG.warn("Cannot auto create starting directory: {}", file);
                 }
             } else if (isStartingDirectoryMustExist()) {
                 throw new FileNotFoundException("Starting directory does not exist: " + file);
@@ -114,13 +118,13 @@ public class FileEndpoint extends GenericFileEndpoint<File> {
 
         // if noop=true then idempotent should also be configured
         if (isNoop() && !isIdempotentSet()) {
-            log.info("Endpoint is configured with noop=true so forcing endpoint to be idempotent as well");
+            LOG.info("Endpoint is configured with noop=true so forcing endpoint to be idempotent as well");
             setIdempotent(true);
         }
 
         // if idempotent and no repository set then create a default one
         if (isIdempotentSet() && isIdempotent() && idempotentRepository == null) {
-            log.info("Using default memory based idempotent repository with cache max size: {}", DEFAULT_IDEMPOTENT_CACHE_SIZE);
+            LOG.info("Using default memory based idempotent repository with cache max size: {}", DEFAULT_IDEMPOTENT_CACHE_SIZE);
             idempotentRepository = MemoryIdempotentRepository.memoryIdempotentRepository(DEFAULT_IDEMPOTENT_CACHE_SIZE);
         }
 
@@ -147,8 +151,8 @@ public class FileEndpoint extends GenericFileEndpoint<File> {
         ObjectHelper.notNull(operations, "operations");
         ObjectHelper.notNull(file, "file");
 
-        if (log.isDebugEnabled()) {
-            log.debug("Creating GenericFilePollingConsumer with queueSize: {} blockWhenFull: {} blockTimeout: {}",
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Creating GenericFilePollingConsumer with queueSize: {} blockWhenFull: {} blockTimeout: {}",
                 getPollingConsumerQueueSize(), isPollingConsumerBlockWhenFull(), getPollingConsumerBlockTimeout());
         }
         GenericFilePollingConsumer result = new GenericFilePollingConsumer(this);
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java
index 56d52ff..2f2cd01 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java
@@ -38,12 +38,16 @@ import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.StopWatch;
 import org.apache.camel.util.StringHelper;
 import org.apache.camel.util.TimeUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for file consumers.
  */
 public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileConsumer.class);
+
     protected GenericFileEndpoint<T> endpoint;
     protected GenericFileOperations<T> operations;
     protected GenericFileProcessStrategy<T> processStrategy;
@@ -113,7 +117,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         // before we poll is there anything we need to check?
         // such as are we connected to the FTP Server still?
         if (!prePollCheck()) {
-            log.debug("Skipping poll as pre poll check returned false");
+            LOG.debug("Skipping poll as pre poll check returned false");
             return 0;
         }
 
@@ -129,19 +133,19 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         } catch (Exception e) {
             // during poll directory we add files to the in progress repository, in case of any exception thrown after this work
             // we must then drain the in progress files before rethrowing the exception
-            log.debug("Error occurred during poll directory: {} due {}. Removing {} files marked as in-progress.", name, e.getMessage(), files.size());
+            LOG.debug("Error occurred during poll directory: {} due {}. Removing {} files marked as in-progress.", name, e.getMessage(), files.size());
             removeExcessiveInProgressFiles(files);
             throw e;
         }
 
         long delta = stop.taken();
-        if (log.isDebugEnabled()) {
-            log.debug("Took {} to poll: {}", TimeUtils.printDuration(delta), name);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Took {} to poll: {}", TimeUtils.printDuration(delta), name);
         }
 
         // log if we hit the limit
         if (limitHit) {
-            log.debug("Limiting maximum messages to poll at {} files as there were more messages in this poll.", maxMessagesPerPoll);
+            LOG.debug("Limiting maximum messages to poll at {} files as there were more messages in this poll.", maxMessagesPerPoll);
         }
 
         // sort files using file comparator if provided
@@ -172,7 +176,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         // we are not eager limiting, but we have configured a limit, so cut the list of files
         if (!eagerLimitMaxMessagesPerPoll && maxMessagesPerPoll > 0) {
             if (files.size() > maxMessagesPerPoll) {
-                log.debug("Limiting maximum messages to poll at {} files as there were more messages in this poll.", maxMessagesPerPoll);
+                LOG.debug("Limiting maximum messages to poll at {} files as there were more messages in this poll.", maxMessagesPerPoll);
                 // must first remove excessive files from the in progress repository
                 removeExcessiveInProgressFiles(q, maxMessagesPerPoll);
             }
@@ -181,7 +185,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         // consume files one by one
         int total = exchanges.size();
         if (total > 0) {
-            log.debug("Total {} files to consume", total);
+            LOG.debug("Total {} files to consume", total);
         }
 
         int polledMessages = processBatch(CastUtils.cast(q));
@@ -198,7 +202,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
 
         // limit if needed
         if (maxMessagesPerPoll > 0 && total > maxMessagesPerPoll) {
-            log.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.", maxMessagesPerPoll, total);
+            LOG.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.", maxMessagesPerPoll, total);
             total = maxMessagesPerPoll;
         }
 
@@ -350,7 +354,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
      */
     protected boolean processExchange(final Exchange exchange) {
         GenericFile<T> file = getExchangeFileProperty(exchange);
-        log.trace("Processing file: {}", file);
+        LOG.trace("Processing file: {}", file);
 
         // must extract the absolute name before the begin strategy as the file could potentially be pre moved
         // and then the file name would be changed
@@ -368,7 +372,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         if (!begin) {
             // no something was wrong, so we need to abort and remove the file from the in progress list
             Exception abortCause = null;
-            log.debug("{} cannot begin processing file: {}", endpoint, file);
+            LOG.debug("{} cannot begin processing file: {}", endpoint, file);
             try {
                 // abort
                 processStrategy.abort(operations, endpoint, exchange, file);
@@ -403,7 +407,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
             
             if (isRetrieveFile()) {
                 // retrieve the file using the stream
-                log.trace("Retrieving file: {} from: {}", name, endpoint);
+                LOG.trace("Retrieving file: {} from: {}", name, endpoint);
     
                 // retrieve the file and check it was a success
                 boolean retrieved;
@@ -417,7 +421,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
 
                 if (!retrieved) {
                     if (ignoreCannotRetrieveFile(name, exchange, cause)) {
-                        log.trace("Cannot retrieve file {} maybe it does not exists. Ignoring.", name);
+                        LOG.trace("Cannot retrieve file {} maybe it does not exists. Ignoring.", name);
                         // remove file from the in progress list as we could not retrieve it, but should ignore
                         endpoint.getInProgressRepository().remove(absoluteFileName);
                         return false;
@@ -433,9 +437,9 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
                     }
                 }
     
-                log.trace("Retrieved file: {} from: {}", name, endpoint);                
+                LOG.trace("Retrieved file: {} from: {}", name, endpoint);
             } else {
-                log.trace("Skipped retrieval of file: {} from: {}", name, endpoint);
+                LOG.trace("Skipped retrieval of file: {} from: {}", name, endpoint);
                 exchange.getIn().setBody(null);
             }
 
@@ -443,7 +447,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
             // (for instance to move the file after we have processed it)
             exchange.adapt(ExtendedExchange.class).addOnCompletion(new GenericFileOnCompletion<>(endpoint, operations, processStrategy, target, absoluteFileName));
 
-            log.debug("About to process file: {} using exchange: {}", target, exchange);
+            LOG.debug("About to process file: {} using exchange: {}", target, exchange);
 
             if (endpoint.isSynchronous()) {
                 // process synchronously
@@ -495,7 +499,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
      */
     protected boolean customProcessExchange(final Exchange exchange, final Processor processor) {
         GenericFile<T> file = getExchangeFileProperty(exchange);
-        log.trace("Custom processing file: {}", file);
+        LOG.trace("Custom processing file: {}", file);
 
         // must extract the absolute name before the begin strategy as the file could potentially be pre moved
         // and then the file name would be changed
@@ -505,8 +509,8 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
             // process using the custom processor
             processor.process(exchange);
         } catch (Exception e) {
-            if (log.isDebugEnabled()) {
-                log.debug(endpoint + " error custom processing: " + file + " due to: " + e.getMessage() + ". This exception will be ignored.", e);
+            if (LOG.isDebugEnabled()) {
+                LOG.debug(endpoint + " error custom processing: " + file + " due to: " + e.getMessage() + ". This exception will be ignored.", e);
             }
             handleException(e);
         } finally {
@@ -531,7 +535,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
         String absoluteFilePath = file.getAbsoluteFilePath();
 
         if (!isMatched(file, isDirectory, files)) {
-            log.trace("File did not match. Will skip this file: {}", file);
+            LOG.trace("File did not match. Will skip this file: {}", file);
             return false;
         }
 
@@ -542,8 +546,8 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
 
         // check if file is already in progress
         if (endpoint.getInProgressRepository().contains(absoluteFilePath)) {
-            if (log.isTraceEnabled()) {
-                log.trace("Skipping as file is already in progress: {}", file.getFileName());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Skipping as file is already in progress: {}", file.getFileName());
             }
             return false;
         }
@@ -555,10 +559,10 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
             if (endpoint.getIdempotentKey() != null) {
                 Exchange dummy = endpoint.createExchange(file);
                 key = endpoint.getIdempotentKey().evaluate(dummy, String.class);
-                log.trace("Evaluated idempotentKey: {} for file: {}", key, file);
+                LOG.trace("Evaluated idempotentKey: {} for file: {}", key, file);
             }
             if (key != null && endpoint.getIdempotentRepository().contains(key)) {
-                log.trace("This consumer is idempotent and the file has been consumed before matching idempotentKey: {}. Will skip this file: {}", key, file);
+                LOG.trace("This consumer is idempotent and the file has been consumed before matching idempotentKey: {}. Will skip this file: {}", key, file);
                 return false;
             }
         }
@@ -661,7 +665,7 @@ public abstract class GenericFileConsumer<T> extends ScheduledBatchPollingConsum
 
             // is it a done file name?
             if (endpoint.isDoneFile(file.getFileNameOnly())) {
-                log.trace("Skipping done file: {}", file);
+                LOG.trace("Skipping done file: {}", file);
                 return false;
             }
 
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileEndpoint.java b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileEndpoint.java
index d47c718..bf26427 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileEndpoint.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileEndpoint.java
@@ -60,12 +60,12 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class GenericFileEndpoint<T> extends ScheduledPollEndpoint implements BrowsableEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileEndpoint.class);
+
     protected static final String DEFAULT_STRATEGYFACTORY_CLASS = "org.apache.camel.component.file.strategy.GenericFileProcessStrategyFactory";
     protected static final int DEFAULT_IDEMPOTENT_CACHE_SIZE = 1000;
     protected static final int DEFAULT_IN_PROGRESS_CACHE_SIZE = 50000;
 
-    protected final Logger log = LoggerFactory.getLogger(getClass());
-
     // common options
 
     @UriParam(label = "advanced", defaultValue = "true")
@@ -261,7 +261,7 @@ public abstract class GenericFileEndpoint<T> extends ScheduledPollEndpoint imple
             try {
                 ServiceHelper.stopService(consumer);
             } catch (Exception e) {
-                log.debug("Error stopping consumer used for browsing exchanges. This exception will be ignored", e);
+                LOG.debug("Error stopping consumer used for browsing exchanges. This exception will be ignored", e);
             }
         }
 
@@ -276,29 +276,29 @@ public abstract class GenericFileEndpoint<T> extends ScheduledPollEndpoint imple
         Class<?> factory = null;
         try {
             FactoryFinder finder = getCamelContext().adapt(ExtendedCamelContext.class).getFactoryFinder("META-INF/services/org/apache/camel/component/");
-            log.trace("Using FactoryFinder: {}", finder);
+            LOG.trace("Using FactoryFinder: {}", finder);
             factory = finder.findClass(getScheme(), "strategy.factory.", CamelContext.class).orElse(null);
         } catch (IOException e) {
-            log.trace("No strategy factory defined in 'META-INF/services/org/apache/camel/component/'", e);
+            LOG.trace("No strategy factory defined in 'META-INF/services/org/apache/camel/component/'", e);
         }
 
         if (factory == null) {
             // use default
             try {
-                log.trace("Using ClassResolver to resolve class: {}", DEFAULT_STRATEGYFACTORY_CLASS);
+                LOG.trace("Using ClassResolver to resolve class: {}", DEFAULT_STRATEGYFACTORY_CLASS);
                 factory = this.getCamelContext().getClassResolver().resolveClass(DEFAULT_STRATEGYFACTORY_CLASS);
             } catch (Exception e) {
-                log.trace("Cannot load class: {}", DEFAULT_STRATEGYFACTORY_CLASS, e);
+                LOG.trace("Cannot load class: {}", DEFAULT_STRATEGYFACTORY_CLASS, e);
             }
             // fallback and us this class loader
             try {
-                if (log.isTraceEnabled()) {
-                    log.trace("Using classloader: {} to resolve class: {}", this.getClass().getClassLoader(), DEFAULT_STRATEGYFACTORY_CLASS);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Using classloader: {} to resolve class: {}", this.getClass().getClassLoader(), DEFAULT_STRATEGYFACTORY_CLASS);
                 }
                 factory = this.getCamelContext().getClassResolver().resolveClass(DEFAULT_STRATEGYFACTORY_CLASS, this.getClass().getClassLoader());
             } catch (Exception e) {
-                if (log.isTraceEnabled()) {
-                    log.trace("Cannot load class: {} using classloader: " + this.getClass().getClassLoader(), DEFAULT_STRATEGYFACTORY_CLASS, e);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Cannot load class: {} using classloader: " + this.getClass().getClassLoader(), DEFAULT_STRATEGYFACTORY_CLASS, e);
                 }
             }
 
@@ -310,7 +310,7 @@ public abstract class GenericFileEndpoint<T> extends ScheduledPollEndpoint imple
         try {
             Method factoryMethod = factory.getMethod("createGenericFileProcessStrategy", CamelContext.class, Map.class);
             Map<String, Object> params = getParamsAsMap();
-            log.debug("Parameters for Generic file process strategy {}", params);
+            LOG.debug("Parameters for Generic file process strategy {}", params);
             return (GenericFileProcessStrategy<T>) ObjectHelper.invokeMethod(factoryMethod, null, getCamelContext(), params);
         } catch (NoSuchMethodException e) {
             throw new TypeNotPresentException(factory.getSimpleName() + ".createGenericFileProcessStrategy method not found", e);
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileOnCompletion.java b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileOnCompletion.java
index 44c527e..e74e744 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileOnCompletion.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileOnCompletion.java
@@ -32,7 +32,7 @@ import org.slf4j.LoggerFactory;
  */
 public class GenericFileOnCompletion<T> implements Synchronization {
 
-    private final Logger log = LoggerFactory.getLogger(GenericFileOnCompletion.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileOnCompletion.class);
     private GenericFileEndpoint<T> endpoint;
     private GenericFileOperations<T> operations;
     private GenericFileProcessStrategy<T> processStrategy;
@@ -72,7 +72,7 @@ public class GenericFileOnCompletion<T> implements Synchronization {
     }
 
     protected void onCompletion(Exchange exchange) {
-        log.debug("Done processing file: {} using exchange: {}", file, exchange);
+        LOG.debug("Done processing file: {} using exchange: {}", file, exchange);
 
         // commit or rollback
         boolean committed = false;
@@ -123,7 +123,7 @@ public class GenericFileOnCompletion<T> implements Synchronization {
         handleDoneFile(exchange);
 
         try {
-            log.trace("Commit file strategy: {} for file: {}", processStrategy, file);
+            LOG.trace("Commit file strategy: {} for file: {}", processStrategy, file);
             processStrategy.commit(operations, endpoint, exchange, file);
         } catch (Exception e) {
             handleException("Error during commit", exchange, e);
@@ -140,8 +140,8 @@ public class GenericFileOnCompletion<T> implements Synchronization {
     protected void processStrategyRollback(GenericFileProcessStrategy<T> processStrategy,
                                            Exchange exchange, GenericFile<T> file) {
 
-        if (log.isWarnEnabled()) {
-            log.warn("Rollback file strategy: {} for file: {}", processStrategy, file);
+        if (LOG.isWarnEnabled()) {
+            LOG.warn("Rollback file strategy: {} for file: {}", processStrategy, file);
         }
 
         // only delete done file if moveFailed option is enabled, as otherwise on rollback,
@@ -170,9 +170,9 @@ public class GenericFileOnCompletion<T> implements Synchronization {
                 try {
                     // delete done file
                     boolean deleted = operations.deleteFile(doneFileName);
-                    log.trace("Done file: {} was deleted: {}", doneFileName, deleted);
+                    LOG.trace("Done file: {} was deleted: {}", doneFileName, deleted);
                     if (!deleted) {
-                        log.warn("Done file: {} could not be deleted", doneFileName);
+                        LOG.warn("Done file: {} could not be deleted", doneFileName);
                     }
                 } catch (Exception e) {
                     handleException("Error deleting done file: " + doneFileName, exchange, e);
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFilePollingConsumer.java b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFilePollingConsumer.java
index dffa628..084c35b 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFilePollingConsumer.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFilePollingConsumer.java
@@ -21,12 +21,14 @@ import org.apache.camel.Exchange;
 import org.apache.camel.RuntimeCamelException;
 import org.apache.camel.spi.PollingConsumerPollStrategy;
 import org.apache.camel.support.EventDrivenPollingConsumer;
-import org.apache.camel.support.ScheduledBatchPollingConsumer;
 import org.apache.camel.support.service.ServiceHelper;
 import org.apache.camel.util.StopWatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFilePollingConsumer.class);
     private final long delay;
 
     public GenericFilePollingConsumer(GenericFileEndpoint endpoint) throws Exception {
@@ -72,8 +74,8 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
 
     @Override
     public Exchange receiveNoWait() {
-        if (log.isTraceEnabled()) {
-            log.trace("receiveNoWait polling file: {}", getConsumer().getEndpoint());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("receiveNoWait polling file: {}", getConsumer().getEndpoint());
         }
         int polled = doReceive(0);
         if (polled > 0) {
@@ -85,8 +87,8 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
 
     @Override
     public Exchange receive() {
-        if (log.isTraceEnabled()) {
-            log.trace("receive polling file: {}", getConsumer().getEndpoint());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("receive polling file: {}", getConsumer().getEndpoint());
         }
         int polled = doReceive(Long.MAX_VALUE);
         if (polled > 0) {
@@ -98,8 +100,8 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
 
     @Override
     public Exchange receive(long timeout) {
-        if (log.isTraceEnabled()) {
-            log.trace("receive({}) polling file: {}", timeout, getConsumer().getEndpoint());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("receive({}) polling file: {}", timeout, getConsumer().getEndpoint());
         }
         int polled = doReceive(timeout);
         if (polled > 0) {
@@ -126,9 +128,9 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
                 if (isRunAllowed()) {
 
                     if (retryCounter == -1) {
-                        log.trace("Starting to poll: {}", this.getEndpoint());
+                        LOG.trace("Starting to poll: {}", this.getEndpoint());
                     } else {
-                        log.debug("Retrying attempt {} to poll: {}", retryCounter, this.getEndpoint());
+                        LOG.debug("Retrying attempt {} to poll: {}", retryCounter, this.getEndpoint());
                     }
 
                     // mark we are polling which should also include the begin/poll/commit
@@ -136,7 +138,7 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
                     if (begin) {
                         retryCounter++;
                         polledMessages = getConsumer().poll();
-                        log.trace("Polled {} messages", polledMessages);
+                        LOG.trace("Polled {} messages", polledMessages);
 
                         if (polledMessages == 0 && sendEmptyMessageWhenIdle) {
                             // send an "empty" exchange
@@ -148,11 +150,11 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
 
                         pollStrategy.commit(getConsumer(), getEndpoint(), polledMessages);
                     } else {
-                        log.debug("Cannot begin polling as pollStrategy returned false: {}", pollStrategy);
+                        LOG.debug("Cannot begin polling as pollStrategy returned false: {}", pollStrategy);
                     }
                 }
 
-                log.trace("Finished polling: {}", this.getEndpoint());
+                LOG.trace("Finished polling: {}", this.getEndpoint());
             } catch (Exception e) {
                 try {
                     boolean retry = pollStrategy.rollback(getConsumer(), getEndpoint(), retryCounter, e);
@@ -201,7 +203,7 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
     public void process(Exchange exchange) throws Exception {
         Object name = exchange.getIn().getHeader(Exchange.FILE_NAME);
         if (name != null) {
-            log.debug("Received file: {}", name);
+            LOG.debug("Received file: {}", name);
         }
         super.process(exchange);
     }
@@ -213,7 +215,7 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
      */
     protected void processEmptyMessage() throws Exception {
         Exchange exchange = getEndpoint().createExchange();
-        log.debug("Sending empty message as there were no messages from polling: {}", this.getEndpoint());
+        LOG.debug("Sending empty message as there were no messages from polling: {}", this.getEndpoint());
         process(exchange);
     }
 
@@ -221,7 +223,7 @@ public class GenericFilePollingConsumer extends EventDrivenPollingConsumer {
         if (delay <= 0) {
             return;
         }
-        log.trace("Sleeping for: {} millis", delay);
+        LOG.trace("Sleeping for: {} millis", delay);
         Thread.sleep(delay);
     }
 
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileProducer.java b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileProducer.java
index 8e187db..97c3e60 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileProducer.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileProducer.java
@@ -38,7 +38,8 @@ import org.slf4j.LoggerFactory;
  * Generic file producer
  */
 public class GenericFileProducer<T> extends DefaultProducer {
-    protected final Logger log = LoggerFactory.getLogger(getClass());
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileProducer.class);
+
     protected final GenericFileEndpoint<T> endpoint;
     protected GenericFileOperations<T> operations;
     // assume writing to 100 different files concurrently at most for the same file producer
@@ -103,7 +104,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
      * @throws Exception is thrown if some error
      */
     protected void processExchange(Exchange exchange, String target) throws Exception {
-        log.trace("Processing file: {} for exchange: {}", target, exchange);
+        LOG.trace("Processing file: {} for exchange: {}", target, exchange);
 
         try {
             preWriteCheck(exchange);
@@ -117,7 +118,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
                 // compute temporary name with the temp prefix
                 tempTarget = createTempFileName(exchange, target);
 
-                log.trace("Writing using tempNameFile: {}", tempTarget);
+                LOG.trace("Writing using tempNameFile: {}", tempTarget);
                
                 //if we should eager delete target file before deploying temporary file
                 if (endpoint.getFileExist() != GenericFileExist.TryRename && endpoint.isEagerDeleteTargetFile()) {
@@ -129,11 +130,11 @@ public class GenericFileProducer<T> extends DefaultProducer {
                     targetExists = operations.existsFile(target);
                     if (targetExists) {
                         
-                        log.trace("EagerDeleteTargetFile, target exists");
+                        LOG.trace("EagerDeleteTargetFile, target exists");
                         
                         if (endpoint.getFileExist() == GenericFileExist.Ignore) {
                             // ignore but indicate that the file was written
-                            log.trace("An existing file already exists: {}. Ignore and do not override it.", target);
+                            LOG.trace("An existing file already exists: {}. Ignore and do not override it.", target);
                             return;
                         } else if (endpoint.getFileExist() == GenericFileExist.Fail) {
                             throw new GenericFileOperationFailedException("File already exist: " + target + ". Cannot write new file.");
@@ -143,7 +144,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
                         } else if (endpoint.isEagerDeleteTargetFile() && endpoint.getFileExist() == GenericFileExist.Override) {
                             // we override the target so we do this by deleting it so the temp file can be renamed later
                             // with success as the existing target file have been deleted
-                            log.trace("Eagerly deleting existing file: {}", target);
+                            LOG.trace("Eagerly deleting existing file: {}", target);
                             if (!operations.deleteFile(target)) {
                                 throw new GenericFileOperationFailedException("Cannot delete file: " + target);
                             }
@@ -153,7 +154,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
 
                 // delete any pre existing temp file
                 if (endpoint.getFileExist() != GenericFileExist.TryRename && operations.existsFile(tempTarget)) {
-                    log.trace("Deleting existing temp file: {}", tempTarget);
+                    LOG.trace("Deleting existing temp file: {}", tempTarget);
                     if (!operations.deleteFile(tempTarget)) {
                         throw new GenericFileOperationFailedException("Cannot delete file: " + tempTarget);
                     }
@@ -173,18 +174,18 @@ public class GenericFileProducer<T> extends DefaultProducer {
                     targetExists = operations.existsFile(target);
                     if (targetExists) {
 
-                        log.trace("Not using EagerDeleteTargetFile, target exists");
+                        LOG.trace("Not using EagerDeleteTargetFile, target exists");
 
                         if (endpoint.getFileExist() == GenericFileExist.Ignore) {
                             // ignore but indicate that the file was written
-                            log.trace("An existing file already exists: {}. Ignore and do not override it.", target);
+                            LOG.trace("An existing file already exists: {}. Ignore and do not override it.", target);
                             return;
                         } else if (endpoint.getFileExist() == GenericFileExist.Fail) {
                             throw new GenericFileOperationFailedException("File already exist: " + target + ". Cannot write new file.");
                         } else if (endpoint.getFileExist() == GenericFileExist.Override) {
                             // we override the target so we do this by deleting it so the temp file can be renamed later
                             // with success as the existing target file have been deleted
-                            log.trace("Deleting existing file: {}", target);
+                            LOG.trace("Deleting existing file: {}", target);
                             if (!operations.deleteFile(target)) {
                                 throw new GenericFileOperationFailedException("Cannot delete file: " + target);
                             }
@@ -193,7 +194,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
                 }
 
                 // now we are ready to rename the temp file to the target file
-                log.trace("Renaming file: [{}] to: [{}]", tempTarget, target);
+                LOG.trace("Renaming file: [{}] to: [{}]", tempTarget, target);
                 boolean renamed = operations.renameFile(tempTarget, target);
                 if (!renamed) {
                     throw new GenericFileOperationFailedException("Cannot rename file from: " + tempTarget + " to: " + target);
@@ -209,7 +210,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
                 Exchange empty = new DefaultExchange(exchange);
                 empty.getIn().setBody("");
 
-                log.trace("Writing done file: [{}]", doneFileName);
+                LOG.trace("Writing done file: [{}]", doneFileName);
                 // delete any existing done file
                 if (operations.existsFile(doneFileName)) {
                     if (!operations.deleteFile(doneFileName)) {
@@ -287,21 +288,21 @@ public class GenericFileProducer<T> extends DefaultProducer {
             boolean absolute = FileUtil.isAbsolute(file);
             if (directory != null) {
                 if (!operations.buildDirectory(directory, absolute)) {
-                    log.debug("Cannot build directory [{}] (could be because of denied permissions)", directory);
+                    LOG.debug("Cannot build directory [{}] (could be because of denied permissions)", directory);
                 }
             }
         }
 
         // upload
-        if (log.isTraceEnabled()) {
-            log.trace("About to write [{}] to [{}] from exchange [{}]", fileName, getEndpoint(), exchange);
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("About to write [{}] to [{}] from exchange [{}]", fileName, getEndpoint(), exchange);
         }
 
         boolean success = operations.storeFile(fileName, exchange, -1);
         if (!success) {
             throw new GenericFileOperationFailedException("Error writing file [" + fileName + "]");
         }
-        log.debug("Wrote [{}] to [{}]", fileName, getEndpoint());
+        LOG.debug("Wrote [{}] to [{}]", fileName, getEndpoint());
     }
 
     public String createFileName(Exchange exchange) {
@@ -327,7 +328,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
         }
 
         if (value instanceof String && StringHelper.hasStartToken((String) value, "simple")) {
-            log.warn("Simple expression: {} detected in header: {} of type String. This feature has been removed (see CAMEL-6748).", value, Exchange.FILE_NAME);
+            LOG.warn("Simple expression: {} detected in header: {} of type String. This feature has been removed (see CAMEL-6748).", value, Exchange.FILE_NAME);
         }
 
         // expression support
@@ -339,7 +340,7 @@ public class GenericFileProducer<T> extends DefaultProducer {
         // evaluate the name as a String from the value
         String name;
         if (expression != null) {
-            log.trace("Filename evaluated as expression: {}", expression);
+            LOG.trace("Filename evaluated as expression: {}", expression);
             name = expression.evaluate(exchange, String.class);
         } else {
             name = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, value);
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentChangedRepositoryReadLockStrategy.java b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentChangedRepositoryReadLockStrategy.java
index 9f00e0f..9c83f8f 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentChangedRepositoryReadLockStrategy.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentChangedRepositoryReadLockStrategy.java
@@ -32,6 +32,8 @@ import org.apache.camel.spi.CamelLogger;
 import org.apache.camel.spi.IdempotentRepository;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A file read lock that uses an {@link IdempotentRepository} and {@link FileChangedExclusiveReadLockStrategy changed} as the lock strategy.
@@ -40,6 +42,8 @@ import org.apache.camel.util.ObjectHelper;
  */
 public class FileIdempotentChangedRepositoryReadLockStrategy extends ServiceSupport implements GenericFileExclusiveReadLockStrategy<File>, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileIdempotentChangedRepositoryReadLockStrategy.class);
+
     private final FileChangedExclusiveReadLockStrategy changed;
     private GenericFileEndpoint<File> endpoint;
     private LoggingLevel readLockLoggingLevel = LoggingLevel.DEBUG;
@@ -63,7 +67,7 @@ public class FileIdempotentChangedRepositoryReadLockStrategy extends ServiceSupp
     @Override
     public void prepareOnStartup(GenericFileOperations<File> operations, GenericFileEndpoint<File> endpoint) throws Exception {
         this.endpoint = endpoint;
-        log.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
+        LOG.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
 
         changed.prepareOnStartup(operations, endpoint);
     }
@@ -81,7 +85,7 @@ public class FileIdempotentChangedRepositoryReadLockStrategy extends ServiceSupp
         boolean answer = idempotentRepository.add(key);
         if (!answer) {
             // another node is processing the file so skip
-            CamelLogger.log(log, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
+            CamelLogger.log(LOG, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
         }
 
         if (answer) {
@@ -114,15 +118,15 @@ public class FileIdempotentChangedRepositoryReadLockStrategy extends ServiceSupp
             try {
                 changed.releaseExclusiveReadLockOnRollback(operations, file, exchange);
             } catch (Exception e) {
-                log.warn("Error during releasing exclusive readlock on rollback. This exception is ignored.", e);
+                LOG.warn("Error during releasing exclusive readlock on rollback. This exception is ignored.", e);
             }
         };
 
         if (readLockIdempotentReleaseDelay > 0 && readLockIdempotentReleaseExecutorService != null) {
-            log.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
             readLockIdempotentReleaseExecutorService.schedule(r, readLockIdempotentReleaseDelay, TimeUnit.MILLISECONDS);
         } else if (readLockIdempotentReleaseDelay > 0) {
-            log.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
             Thread.sleep(readLockIdempotentReleaseDelay);
             r.run();
         } else {
@@ -144,15 +148,15 @@ public class FileIdempotentChangedRepositoryReadLockStrategy extends ServiceSupp
             try {
                 changed.releaseExclusiveReadLockOnCommit(operations, file, exchange);
             } catch (Exception e) {
-                log.warn("Error during releasing exclusive readlock on rollback. This exception is ignored.", e);
+                LOG.warn("Error during releasing exclusive readlock on rollback. This exception is ignored.", e);
             }
         };
 
         if (readLockIdempotentReleaseDelay > 0 && readLockIdempotentReleaseExecutorService != null) {
-            log.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
             readLockIdempotentReleaseExecutorService.schedule(r, readLockIdempotentReleaseDelay, TimeUnit.MILLISECONDS);
         } else if (readLockIdempotentReleaseDelay > 0) {
-            log.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
             Thread.sleep(readLockIdempotentReleaseDelay);
             r.run();
         } else {
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRenameRepositoryReadLockStrategy.java b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRenameRepositoryReadLockStrategy.java
index 6c644f8..b414e79 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRenameRepositoryReadLockStrategy.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRenameRepositoryReadLockStrategy.java
@@ -30,6 +30,8 @@ import org.apache.camel.spi.CamelLogger;
 import org.apache.camel.spi.IdempotentRepository;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A file read lock that uses an {@link IdempotentRepository} and {@link FileRenameExclusiveReadLockStrategy rename} as the lock strategy.
@@ -38,6 +40,7 @@ import org.apache.camel.util.ObjectHelper;
  */
 public class FileIdempotentRenameRepositoryReadLockStrategy extends ServiceSupport implements GenericFileExclusiveReadLockStrategy<File>, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileIdempotentRenameRepositoryReadLockStrategy.class);
     private final FileRenameExclusiveReadLockStrategy rename;
     private GenericFileEndpoint<File> endpoint;
     private LoggingLevel readLockLoggingLevel = LoggingLevel.DEBUG;
@@ -56,7 +59,7 @@ public class FileIdempotentRenameRepositoryReadLockStrategy extends ServiceSuppo
     @Override
     public void prepareOnStartup(GenericFileOperations<File> operations, GenericFileEndpoint<File> endpoint) throws Exception {
         this.endpoint = endpoint;
-        log.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
+        LOG.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
 
         rename.prepareOnStartup(operations, endpoint);
     }
@@ -74,7 +77,7 @@ public class FileIdempotentRenameRepositoryReadLockStrategy extends ServiceSuppo
         boolean answer = idempotentRepository.add(key);
         if (!answer) {
             // another node is processing the file so skip
-            CamelLogger.log(log, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
+            CamelLogger.log(LOG, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
         }
 
         if (answer) {
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRepositoryReadLockStrategy.java b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRepositoryReadLockStrategy.java
index 49e125b..8fbe17a 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRepositoryReadLockStrategy.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileIdempotentRepositoryReadLockStrategy.java
@@ -32,6 +32,8 @@ import org.apache.camel.spi.CamelLogger;
 import org.apache.camel.spi.IdempotentRepository;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A file read lock that uses an {@link org.apache.camel.spi.IdempotentRepository} as the lock strategy. This allows to plugin and use existing
@@ -40,6 +42,7 @@ import org.apache.camel.util.ObjectHelper;
  */
 public class FileIdempotentRepositoryReadLockStrategy extends ServiceSupport implements GenericFileExclusiveReadLockStrategy<File>, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FileIdempotentRepositoryReadLockStrategy.class);
     private GenericFileEndpoint<File> endpoint;
     private LoggingLevel readLockLoggingLevel = LoggingLevel.DEBUG;
     private CamelContext camelContext;
@@ -55,7 +58,7 @@ public class FileIdempotentRepositoryReadLockStrategy extends ServiceSupport imp
     @Override
     public void prepareOnStartup(GenericFileOperations<File> operations, GenericFileEndpoint<File> endpoint) throws Exception {
         this.endpoint = endpoint;
-        log.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
+        LOG.info("Using FileIdempotentRepositoryReadLockStrategy: {} on endpoint: {}", idempotentRepository, endpoint);
     }
 
     @Override
@@ -71,7 +74,7 @@ public class FileIdempotentRepositoryReadLockStrategy extends ServiceSupport imp
         boolean answer = idempotentRepository.add(key);
         if (!answer) {
             // another node is processing the file so skip
-            CamelLogger.log(log, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
+            CamelLogger.log(LOG, readLockLoggingLevel, "Cannot acquire read lock. Will skip the file: " + file);
         }
         return answer;
     }
@@ -94,10 +97,10 @@ public class FileIdempotentRepositoryReadLockStrategy extends ServiceSupport imp
         };
 
         if (readLockIdempotentReleaseDelay > 0 && readLockIdempotentReleaseExecutorService != null) {
-            log.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
             readLockIdempotentReleaseExecutorService.schedule(r, readLockIdempotentReleaseDelay, TimeUnit.MILLISECONDS);
         } else if (readLockIdempotentReleaseDelay > 0) {
-            log.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
             Thread.sleep(readLockIdempotentReleaseDelay);
             r.run();
         } else {
@@ -118,10 +121,10 @@ public class FileIdempotentRepositoryReadLockStrategy extends ServiceSupport imp
         };
 
         if (readLockIdempotentReleaseDelay > 0 && readLockIdempotentReleaseExecutorService != null) {
-            log.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Scheduling readlock release task to run asynchronous delayed after {} millis", readLockIdempotentReleaseDelay);
             readLockIdempotentReleaseExecutorService.schedule(r, readLockIdempotentReleaseDelay, TimeUnit.MILLISECONDS);
         } else if (readLockIdempotentReleaseDelay > 0) {
-            log.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
+            LOG.debug("Delaying readlock release task {} millis", readLockIdempotentReleaseDelay);
             Thread.sleep(readLockIdempotentReleaseDelay);
             r.run();
         } else {
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileDeleteProcessStrategy.java b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileDeleteProcessStrategy.java
index 14bdf30..ebdcab6 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileDeleteProcessStrategy.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileDeleteProcessStrategy.java
@@ -22,9 +22,13 @@ import org.apache.camel.component.file.GenericFileEndpoint;
 import org.apache.camel.component.file.GenericFileOperationFailedException;
 import org.apache.camel.component.file.GenericFileOperations;
 import org.apache.camel.support.ExchangeHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class GenericFileDeleteProcessStrategy<T> extends GenericFileProcessStrategySupport<T> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileDeleteProcessStrategy.class);
+
     private GenericFileRenamer<T> failureRenamer;
     private GenericFileRenamer<T> beginRenamer;
 
@@ -81,7 +85,7 @@ public class GenericFileDeleteProcessStrategy<T> extends GenericFileProcessStrat
                 if (!exits) {
                     deleted = true;
                 } else {
-                    log.trace("File was not deleted at this attempt will try again in 1 sec.: {}", file);
+                    LOG.trace("File was not deleted at this attempt will try again in 1 sec.: {}", file);
                     // sleep a bit and try again
                     Thread.sleep(1000);
                 }
diff --git a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileProcessStrategySupport.java b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileProcessStrategySupport.java
index dad767f..36966dc 100644
--- a/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileProcessStrategySupport.java
+++ b/components/camel-file/src/main/java/org/apache/camel/component/file/strategy/GenericFileProcessStrategySupport.java
@@ -31,11 +31,16 @@ import org.apache.camel.component.file.GenericFileProcessStrategy;
 import org.apache.camel.support.service.ServiceHelper;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.FileUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for implementations of {@link GenericFileProcessStrategy}.
  */
 public abstract class GenericFileProcessStrategySupport<T> extends ServiceSupport implements GenericFileProcessStrategy<T>, CamelContextAware {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GenericFileProcessStrategySupport.class);
+
     protected GenericFileExclusiveReadLockStrategy<T> exclusiveReadLockStrategy;
     protected CamelContext camelContext;
 
@@ -126,7 +131,7 @@ public abstract class GenericFileProcessStrategySupport<T> extends ServiceSuppor
             throw new GenericFileOperationFailedException("Cannot create directory: " + to.getParent() + " (could be because of denied permissions)");
         }
 
-        log.debug("Renaming file: {} to: {}", from, to);
+        LOG.debug("Renaming file: {} to: {}", from, to);
         boolean renamed = operations.renameFile(from.getAbsoluteFilePath(), to.getAbsoluteFilePath());
         if (!renamed) {
             throw new GenericFileOperationFailedException("Cannot rename file: " + from + " to: " + to);
@@ -140,7 +145,7 @@ public abstract class GenericFileProcessStrategySupport<T> extends ServiceSuppor
         File local = exchange.getIn().getHeader(Exchange.FILE_LOCAL_WORK_PATH, File.class);
         if (local != null && local.exists()) {
             boolean deleted = FileUtil.deleteFile(local);
-            log.trace("Local work file: {} was deleted: {}", local, deleted);
+            LOG.trace("Local work file: {} was deleted: {}", local, deleted);
         }
     }
 
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpConsumer.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpConsumer.java
index a181195..1c6ff32 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpConsumer.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpConsumer.java
@@ -37,6 +37,8 @@ import org.apache.camel.util.TimeUtils;
 import org.apache.camel.util.URISupport;
 import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPFile;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * FTP consumer
@@ -44,6 +46,8 @@ import org.apache.commons.net.ftp.FTPFile;
 @ManagedResource(description = "Managed FtpConsumer")
 public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(FtpConsumer.class);
+
     protected String endpointPath;
 
     private transient String ftpConsumerToString;
@@ -66,13 +70,13 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
         try {
             super.doStart();
             if (endpoint.isAutoCreate()) {
-                log.debug("Auto creating directory: {}", endpoint.getConfiguration().getDirectory());
+                LOG.debug("Auto creating directory: {}", endpoint.getConfiguration().getDirectory());
                 try {
                     connectIfNecessary();
                     operations.buildDirectory(endpoint.getConfiguration().getDirectory(), true);
                 } catch (GenericFileOperationFailedException e) {
                     // log a WARN as we want to start the consumer.
-                    log.warn("Error auto creating directory: " + endpoint.getConfiguration().getDirectory()
+                    LOG.warn("Error auto creating directory: " + endpoint.getConfiguration().getDirectory()
                         + " due " + e.getMessage() + ". This exception is ignored.", e);
                 }
             }
@@ -114,7 +118,7 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
 
     @Override
     protected boolean doPollDirectory(String absolutePath, String dirName, List<GenericFile<FTPFile>> fileList, int depth) {
-        log.trace("doPollDirectory from absolutePath: {}, dirName: {}", absolutePath, dirName);
+        LOG.trace("doPollDirectory from absolutePath: {}, dirName: {}", absolutePath, dirName);
 
         depth++;
 
@@ -132,7 +136,7 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
                 dir = absolutePath;
             }
 
-            log.trace("Polling directory: {}", dir);
+            LOG.trace("Polling directory: {}", dir);
             if (isUseList()) {
                 if (isStepwise()) {
                     files = operations.listFiles();
@@ -153,7 +157,7 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
             }
         } catch (GenericFileOperationFailedException e) {
             if (ignoreCannotRetrieveFile(null, null, e)) {
-                log.debug("Cannot list files in directory {} due directory does not exists or file permission error.", dir);
+                LOG.debug("Cannot list files in directory {} due directory does not exists or file permission error.", dir);
             } else {
                 throw e;
             }
@@ -161,11 +165,11 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
 
         if (files == null || files.isEmpty()) {
             // no files in this directory to poll
-            log.trace("No files found in directory: {}", dir);
+            LOG.trace("No files found in directory: {}", dir);
             return true;
         } else {
             // we found some files
-            log.trace("Found {} in directory: {}", files.size(), dir);
+            LOG.trace("Found {} in directory: {}", files.size(), dir);
         }
 
         if (getEndpoint().isPreSort()) {
@@ -174,8 +178,8 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
 
         for (FTPFile file : files) {
 
-            if (log.isTraceEnabled()) {
-                log.trace("FtpFile[name={}, dir={}, file={}]", file.getName(), file.isDirectory(), file.isFile());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("FtpFile[name={}, dir={}, file={}]", file.getName(), file.isDirectory(), file.isFile());
             }
 
             // check if we can continue polling in files
@@ -201,7 +205,7 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
                     fileList.add(remote);
                 }
             } else {
-                log.debug("Ignoring unsupported remote file type: {}", file);
+                LOG.debug("Ignoring unsupported remote file type: {}", file);
             }
         }
 
@@ -218,7 +222,7 @@ public class FtpConsumer extends RemoteFileConsumer<FTPFile> {
             }
         }
 
-        log.trace("Done file: {} does not exist", doneFileName);
+        LOG.trace("Done file: {} does not exist", doneFileName);
         return false;
     }
 
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpEndpoint.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpEndpoint.java
index 04ed69d..144578d 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpEndpoint.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpEndpoint.java
@@ -37,6 +37,8 @@ import org.apache.camel.util.ObjectHelper;
 import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPClientConfig;
 import org.apache.commons.net.ftp.FTPFile;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The ftp component is used for uploading or downloading files from FTP servers.
@@ -47,6 +49,9 @@ import org.apache.commons.net.ftp.FTPFile;
         excludeProperties = "appendChars,readLockIdempotentReleaseAsync,readLockIdempotentReleaseAsyncPoolSize,readLockIdempotentReleaseDelay,readLockIdempotentReleaseExecutorService")
 @ManagedResource(description = "Managed FtpEndpoint")
 public class FtpEndpoint<T extends FTPFile> extends RemoteFileEndpoint<FTPFile> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(FtpEndpoint.class);
+
     protected int soTimeout;
     protected int dataTimeout;
 
@@ -153,7 +158,7 @@ public class FtpEndpoint<T extends FTPFile> extends RemoteFileEndpoint<FTPFile>
             }
             int min = getCamelContext().getTypeConverter().mandatoryConvertTo(int.class, parts[0]);
             int max = getCamelContext().getTypeConverter().mandatoryConvertTo(int.class, parts[1]);
-            log.debug("Using active port range: {}-{}", min, max);
+            LOG.debug("Using active port range: {}-{}", min, max);
             client.setActivePortRange(min, max);
         }
 
@@ -186,8 +191,8 @@ public class FtpEndpoint<T extends FTPFile> extends RemoteFileEndpoint<FTPFile>
             client.setDataTimeout(dataTimeout);
         }
 
-        if (log.isDebugEnabled()) {
-            log.debug("Created FTPClient[connectTimeout: {}, soTimeout: {}, dataTimeout: {}, bufferSize: {}"
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Created FTPClient[connectTimeout: {}, soTimeout: {}, dataTimeout: {}, bufferSize: {}"
                             + ", receiveDataSocketBufferSize: {}, sendDataSocketBufferSize: {}]: {}",
                     client.getConnectTimeout(), getSoTimeout(), dataTimeout, client.getBufferSize(),
                     client.getReceiveDataSocketBufferSize(), client.getSendDataSocketBufferSize(), client);
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpsEndpoint.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpsEndpoint.java
index 58ffbd9..829df4b 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpsEndpoint.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/FtpsEndpoint.java
@@ -28,6 +28,7 @@ import javax.net.ssl.SSLSocket;
 import javax.net.ssl.TrustManagerFactory;
 
 import org.apache.camel.api.management.ManagedResource;
+import org.apache.camel.component.file.GenericFileEndpoint;
 import org.apache.camel.spi.UriEndpoint;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.jsse.SSLContextParameters;
@@ -36,6 +37,8 @@ import org.apache.commons.net.ftp.FTPClient;
 import org.apache.commons.net.ftp.FTPClientConfig;
 import org.apache.commons.net.ftp.FTPFile;
 import org.apache.commons.net.ftp.FTPSClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The ftps (FTP secure SSL/TLS) component is used for uploading or downloading files from FTP servers.
@@ -46,6 +49,8 @@ import org.apache.commons.net.ftp.FTPSClient;
         excludeProperties = "appendChars,readLockIdempotentReleaseAsync,readLockIdempotentReleaseAsyncPoolSize,readLockIdempotentReleaseDelay,readLockIdempotentReleaseExecutorService")
 @ManagedResource(description = "Managed FtpsEndpoint")
 public class FtpsEndpoint extends FtpEndpoint<FTPFile> {
+    private static final Logger LOG = LoggerFactory.getLogger(FtpsEndpoint.class);
+
     @UriParam
     protected FtpsConfiguration configuration;
     @UriParam(label = "security")
@@ -119,7 +124,7 @@ public class FtpsEndpoint extends FtpEndpoint<FTPFile> {
                 try {
                     keyStore.load(keyStoreFileInputStream, password.toCharArray());
                 } finally {
-                    IOHelper.close(keyStoreFileInputStream, "keyStore", log);
+                    IOHelper.close(keyStoreFileInputStream, "keyStore", LOG);
                 }
     
                 KeyManagerFactory keyMgrFactory = KeyManagerFactory.getInstance(algorithm);
@@ -142,7 +147,7 @@ public class FtpsEndpoint extends FtpEndpoint<FTPFile> {
                 try {
                     trustStore.load(trustStoreFileInputStream, password.toCharArray());
                 } finally {
-                    IOHelper.close(trustStoreFileInputStream, "trustStore", log);
+                    IOHelper.close(trustStoreFileInputStream, "trustStore", LOG);
                 }
     
                 TrustManagerFactory trustMgrFactory = TrustManagerFactory.getInstance(algorithm);
@@ -206,8 +211,8 @@ public class FtpsEndpoint extends FtpEndpoint<FTPFile> {
             client.setDataTimeout(dataTimeout);
         }
 
-        if (log.isDebugEnabled()) {
-            log.debug("Created FTPSClient[connectTimeout: {}, soTimeout: {}, dataTimeout: {}, bufferSize: {}"
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Created FTPSClient[connectTimeout: {}, soTimeout: {}, dataTimeout: {}, bufferSize: {}"
                             + ", receiveDataSocketBufferSize: {}, sendDataSocketBufferSize: {}]: {}",
                     client.getConnectTimeout(), getSoTimeout(), dataTimeout, client.getBufferSize(),
                     client.getReceiveDataSocketBufferSize(), client.getSendDataSocketBufferSize(), client);
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileConsumer.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileConsumer.java
index 25573fb..9c364ad 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileConsumer.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileConsumer.java
@@ -28,11 +28,16 @@ import org.apache.camel.component.file.GenericFileConsumer;
 import org.apache.camel.component.file.GenericFileOperationFailedException;
 import org.apache.camel.component.file.GenericFileProcessStrategy;
 import org.apache.camel.support.SynchronizationAdapter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for remote file consumers.
  */
 public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
+
+    private static final Logger LOG = LoggerFactory.getLogger(RemoteFileConsumer.class);
+
     protected transient boolean loggedIn;
     protected transient boolean loggedInWarning;
 
@@ -53,8 +58,8 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
 
     @Override
     protected boolean prePollCheck() throws Exception {
-        if (log.isTraceEnabled()) {
-            log.trace("prePollCheck on {}", getEndpoint().getConfiguration().remoteServerInformation());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("prePollCheck on {}", getEndpoint().getConfiguration().remoteServerInformation());
         }
         try {
             connectIfNecessary();
@@ -70,7 +75,7 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
         if (!loggedIn) {
             String message = "Cannot connect/login to: " + remoteServer() + ". Will skip this poll.";
             if (!loggedInWarning) {
-                log.warn(message);
+                LOG.warn(message);
                 loggedInWarning = true;
             }
             return false;
@@ -84,15 +89,15 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
 
     @Override
     protected void postPollCheck(int polledMessages) {
-        if (log.isTraceEnabled()) {
-            log.trace("postPollCheck on {}", getEndpoint().getConfiguration().remoteServerInformation());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("postPollCheck on {}", getEndpoint().getConfiguration().remoteServerInformation());
         }
 
         // if we did not poll any messages, but are configured to disconnect then we need to do this now
         // as there is no exchanges to be routed that otherwise will disconnect from the last UoW
         if (polledMessages == 0) {
             if (getEndpoint().isDisconnect()) {
-                log.trace("postPollCheck disconnect from: {}", getEndpoint());
+                LOG.trace("postPollCheck disconnect from: {}", getEndpoint());
                 disconnect();
             }
         }
@@ -110,7 +115,7 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
             exchange.adapt(ExtendedExchange.class).addOnCompletion(new SynchronizationAdapter() {
                 @Override
                 public void onDone(Exchange exchange) {
-                    log.trace("processExchange disconnect from: {}", getEndpoint());
+                    LOG.trace("processExchange disconnect from: {}", getEndpoint());
                     disconnect();
                 }
 
@@ -153,14 +158,14 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
         // disconnect
         try {
             if (getOperations().isConnected()) {
-                if (log.isDebugEnabled()) {
-                    log.debug("Disconnecting from: {}", remoteServer());
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Disconnecting from: {}", remoteServer());
                 }
                 getOperations().disconnect();
             }
         } catch (GenericFileOperationFailedException e) {
             // ignore just log a warning
-            log.warn("Error occurred while disconnecting from " + remoteServer() + " due: " + e.getMessage() + ". This exception will be ignored.");
+            LOG.warn("Error occurred while disconnecting from " + remoteServer() + " due: " + e.getMessage() + ". This exception will be ignored.");
         }
     }
 
@@ -170,13 +175,13 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
 
         // disconnect
         try {
-            if (log.isDebugEnabled()) {
-                log.debug("Force disconnecting from: {}", remoteServer());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Force disconnecting from: {}", remoteServer());
             }
             getOperations().forceDisconnect();
         } catch (GenericFileOperationFailedException e) {
             // ignore just log a warning
-            log.warn("Error occurred while disconnecting from " + remoteServer() + " due: " + e.getMessage() + ". This exception will be ignored.");
+            LOG.warn("Error occurred while disconnecting from " + remoteServer() + " due: " + e.getMessage() + ". This exception will be ignored.");
         }
     }
 
@@ -187,18 +192,18 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
             isConnected = getOperations().sendNoop();
         } catch (Exception ex) {
             // here we just ignore the exception and try to reconnect
-            if (log.isDebugEnabled()) {
-                log.debug("Exception checking connection status: {}", ex.getMessage());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Exception checking connection status: {}", ex.getMessage());
             }
         }
 
         if (!loggedIn || !isConnected) {
-            if (log.isDebugEnabled()) {
-                log.debug("Not connected/logged in, connecting to: {}", remoteServer());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Not connected/logged in, connecting to: {}", remoteServer());
             }
             loggedIn = getOperations().connect((RemoteFileConfiguration) endpoint.getConfiguration(), null);
             if (loggedIn) {
-                log.debug("Connected and logged in to: {}", remoteServer());
+                LOG.debug("Connected and logged in to: {}", remoteServer());
             }
         }
     }
@@ -222,17 +227,17 @@ public abstract class RemoteFileConsumer<T> extends GenericFileConsumer<T> {
      */
     protected boolean doSafePollSubDirectory(String absolutePath, String dirName, List<GenericFile<T>> fileList, int depth) {
         try {
-            log.trace("Polling sub directory: {} from: {}", absolutePath, endpoint);
+            LOG.trace("Polling sub directory: {} from: {}", absolutePath, endpoint);
             //Try to poll the directory
             return doPollDirectory(absolutePath, dirName, fileList, depth);
         } catch (Exception e) {
-            log.debug("Caught exception {}", e.getMessage());
+            LOG.debug("Caught exception {}", e.getMessage());
             if (ignoreCannotRetrieveFile(absolutePath, null, e)) {
-                log.trace("Ignoring file error {} for {}", e.getMessage(), absolutePath);
+                LOG.trace("Ignoring file error {} for {}", e.getMessage(), absolutePath);
                 //indicate no files in this directory to poll, continue with fileList
                 return true;
             } else {
-                log.trace("Not ignoring file error {} for {}", e.getMessage(), absolutePath);
+                LOG.trace("Not ignoring file error {} for {}", e.getMessage(), absolutePath);
                 if (e instanceof GenericFileOperationFailedException) {
                     throw (GenericFileOperationFailedException) e;
                 } else {
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileEndpoint.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileEndpoint.java
index f04f290..fdf7646 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileEndpoint.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileEndpoint.java
@@ -29,12 +29,16 @@ import org.apache.camel.component.file.GenericFileProducer;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.processor.idempotent.MemoryIdempotentRepository;
 import org.apache.camel.util.StringHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Remote file endpoint.
  */
 public abstract class RemoteFileEndpoint<T> extends GenericFileEndpoint<T> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(RemoteFileEndpoint.class);
+
     @UriParam(label = "advanced")
     private int maximumReconnectAttempts = 3;
     @UriParam(label = "advanced")
@@ -114,13 +118,13 @@ public abstract class RemoteFileEndpoint<T> extends GenericFileEndpoint<T> {
 
         // if noop=true then idempotent should also be configured
         if (isNoop() && !isIdempotentSet()) {
-            log.info("Endpoint is configured with noop=true so forcing endpoint to be idempotent as well");
+            LOG.info("Endpoint is configured with noop=true so forcing endpoint to be idempotent as well");
             setIdempotent(true);
         }
 
         // if idempotent and no repository set then create a default one
         if (isIdempotentSet() && isIdempotent() && idempotentRepository == null) {
-            log.info("Using default memory based idempotent repository with cache max size: {}", DEFAULT_IDEMPOTENT_CACHE_SIZE);
+            LOG.info("Using default memory based idempotent repository with cache max size: {}", DEFAULT_IDEMPOTENT_CACHE_SIZE);
             idempotentRepository = MemoryIdempotentRepository.memoryIdempotentRepository(DEFAULT_IDEMPOTENT_CACHE_SIZE);
         }
 
@@ -138,8 +142,8 @@ public abstract class RemoteFileEndpoint<T> extends GenericFileEndpoint<T> {
 
     @Override
     public PollingConsumer createPollingConsumer() throws Exception {
-        if (log.isDebugEnabled()) {
-            log.debug("Creating GenericFilePollingConsumer with queueSize: {} blockWhenFull: {} blockTimeout: {}",
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Creating GenericFilePollingConsumer with queueSize: {} blockWhenFull: {} blockTimeout: {}",
                 getPollingConsumerQueueSize(), isPollingConsumerBlockWhenFull(), getPollingConsumerBlockTimeout());
         }
         GenericFilePollingConsumer result = new GenericFilePollingConsumer(this);
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileProducer.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileProducer.java
index 329886a..9ba3f17 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileProducer.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/RemoteFileProducer.java
@@ -20,12 +20,15 @@ import org.apache.camel.Exchange;
 import org.apache.camel.component.file.GenericFileOperationFailedException;
 import org.apache.camel.component.file.GenericFileProducer;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Generic remote file producer for all the FTP variations.
  */
 public class RemoteFileProducer<T> extends GenericFileProducer<T> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(RemoteFileProducer.class);
     private boolean loggedIn;
     
     private transient String remoteFileProducerToString;
@@ -80,14 +83,14 @@ public class RemoteFileProducer<T> extends GenericFileProducer<T> {
         loggedIn = false;
         if (isStopping() || isStopped()) {
             // if we are stopping then ignore any exception during a poll
-            log.debug("Exception occurred during stopping: {}", exception.getMessage());
+            LOG.debug("Exception occurred during stopping: {}", exception.getMessage());
         } else {
-            log.warn("Writing file failed with: {}", exception.getMessage());
+            LOG.warn("Writing file failed with: {}", exception.getMessage());
             try {
                 disconnect();
             } catch (Exception e) {
                 // ignore exception
-                log.debug("Ignored exception during disconnect: {}", e.getMessage());
+                LOG.debug("Ignored exception during disconnect: {}", e.getMessage());
             }
             // rethrow the original exception*/
             throw exception;
@@ -97,7 +100,7 @@ public class RemoteFileProducer<T> extends GenericFileProducer<T> {
     public void disconnect() throws GenericFileOperationFailedException {
         loggedIn = false;
         if (getOperations().isConnected()) {
-            log.debug("Disconnecting from: {}", getEndpoint());
+            LOG.debug("Disconnecting from: {}", getEndpoint());
             getOperations().disconnect();
         }
     }
@@ -116,11 +119,11 @@ public class RemoteFileProducer<T> extends GenericFileProducer<T> {
                     // mark as not logged in, since the noop failed
                     loggedIn = false;
                 }
-                log.trace("preWriteCheck send noop success: {}", noop);
+                LOG.trace("preWriteCheck send noop success: {}", noop);
             } else {
                 // okay send noop is disabled then we would regard the op as success
                 noop = true;
-                log.trace("preWriteCheck send noop disabled");
+                LOG.trace("preWriteCheck send noop disabled");
             }
         }
 
@@ -142,22 +145,22 @@ public class RemoteFileProducer<T> extends GenericFileProducer<T> {
         try {
             boolean isLast = exchange.getProperty(Exchange.BATCH_COMPLETE, false, Boolean.class);
             if (isLast && getEndpoint().isDisconnectOnBatchComplete()) {
-                log.trace("postWriteCheck disconnect on batch complete from: {}", getEndpoint());
+                LOG.trace("postWriteCheck disconnect on batch complete from: {}", getEndpoint());
                 disconnect();
             }
             if (getEndpoint().isDisconnect()) {
-                log.trace("postWriteCheck disconnect from: {}", getEndpoint());
+                LOG.trace("postWriteCheck disconnect from: {}", getEndpoint());
                 disconnect();
             }
         } catch (GenericFileOperationFailedException e) {
             // ignore just log a warning
-            log.warn("Exception occurred during disconnecting from: " + getEndpoint() + " " + e.getMessage());
+            LOG.warn("Exception occurred during disconnecting from: " + getEndpoint() + " " + e.getMessage());
         }
     }
 
     @Override
     protected void doStart() throws Exception {
-        log.debug("Starting");
+        LOG.debug("Starting");
         // do not connect when component starts, just wait until we process as we will
         // connect at that time if needed
         super.doStart();
@@ -168,20 +171,20 @@ public class RemoteFileProducer<T> extends GenericFileProducer<T> {
         try {
             disconnect();
         } catch (Exception e) {
-            log.debug("Exception occurred during disconnecting from: " + getEndpoint() + " " + e.getMessage());
+            LOG.debug("Exception occurred during disconnecting from: " + getEndpoint() + " " + e.getMessage());
         }
         super.doStop();
     }
 
     protected void connectIfNecessary(Exchange exchange) throws GenericFileOperationFailedException {
         if (!loggedIn || !getOperations().isConnected()) {
-            log.debug("Not already connected/logged in. Connecting to: {}", getEndpoint());
+            LOG.debug("Not already connected/logged in. Connecting to: {}", getEndpoint());
             RemoteFileConfiguration config = getEndpoint().getConfiguration();
             loggedIn = getOperations().connect(config, exchange);
             if (!loggedIn) {
                 return;
             }
-            log.debug("Connected and logged in to: {}", getEndpoint());
+            LOG.debug("Connected and logged in to: {}", getEndpoint());
         }
     }
 
diff --git a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/SftpConsumer.java b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/SftpConsumer.java
index 8569234..b069399 100644
--- a/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/SftpConsumer.java
+++ b/components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/SftpConsumer.java
@@ -32,12 +32,16 @@ import org.apache.camel.util.FileUtil;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.camel.util.StringHelper;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Secure FTP consumer
  */
 public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SftpConsumer.class);
+
     private String endpointPath;
 
     private transient String sftpConsumerToString;
@@ -55,13 +59,13 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
         try {
             super.doStart();
             if (endpoint.isAutoCreate()) {
-                log.debug("Auto creating directory: {}", endpoint.getConfiguration().getDirectory());
+                LOG.debug("Auto creating directory: {}", endpoint.getConfiguration().getDirectory());
                 try {
                     connectIfNecessary();
                     operations.buildDirectory(endpoint.getConfiguration().getDirectory(), true);
                 } catch (GenericFileOperationFailedException e) {
                     // log a WARN as we want to start the consumer.
-                    log.warn("Error auto creating directory: " + endpoint.getConfiguration().getDirectory()
+                    LOG.warn("Error auto creating directory: " + endpoint.getConfiguration().getDirectory()
                             + " due " + e.getMessage() + ". This exception is ignored.", e);
                 }
             }
@@ -103,7 +107,7 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
 
     @Override
     protected boolean doPollDirectory(String absolutePath, String dirName, List<GenericFile<SftpRemoteFile>> fileList, int depth) {
-        log.trace("doPollDirectory from absolutePath: {}, dirName: {}", absolutePath, dirName);
+        LOG.trace("doPollDirectory from absolutePath: {}, dirName: {}", absolutePath, dirName);
 
         depth++;
 
@@ -121,7 +125,7 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
                 dir = absolutePath;
             }
 
-            log.trace("Polling directory: {}", dir);
+            LOG.trace("Polling directory: {}", dir);
             if (isUseList()) {
                 if (isStepwise()) {
                     files = operations.listFiles();
@@ -140,7 +144,7 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
             }
         } catch (GenericFileOperationFailedException e) {
             if (ignoreCannotRetrieveFile(null, null, e)) {
-                log.debug("Cannot list files in directory {} due directory does not exists or file permission error.", dir);
+                LOG.debug("Cannot list files in directory {} due directory does not exists or file permission error.", dir);
             } else {
                 throw e;
             }
@@ -148,11 +152,11 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
 
         if (files == null || files.isEmpty()) {
             // no files in this directory to poll
-            log.trace("No files found in directory: {}", dir);
+            LOG.trace("No files found in directory: {}", dir);
             return true;
         } else {
             // we found some files
-            log.trace("Found {} in directory: {}", files.size(), dir);
+            LOG.trace("Found {} in directory: {}", files.size(), dir);
         }
         
         if (getEndpoint().isPreSort()) {
@@ -161,8 +165,8 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
 
         for (SftpRemoteFile file : files) {
 
-            if (log.isTraceEnabled()) {
-                log.trace("SftpFile[fileName={}, longName={}, dir={}]", file.getFilename(), file.getLongname(), file.isDirectory());
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("SftpFile[fileName={}, longName={}, dir={}]", file.getFilename(), file.getLongname(), file.isDirectory());
             }
 
             // check if we can continue polling in files
@@ -205,7 +209,7 @@ public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
             }
         }
 
-        log.trace("Done file: {} does not exist", doneFileName);
+        LOG.trace("Done file: {} does not exist", doneFileName);
         return false;
     }
 
diff --git a/components/camel-ganglia/src/main/java/org/apache/camel/component/ganglia/GangliaProducer.java b/components/camel-ganglia/src/main/java/org/apache/camel/component/ganglia/GangliaProducer.java
index 24d6f7c..30e7489 100644
--- a/components/camel-ganglia/src/main/java/org/apache/camel/component/ganglia/GangliaProducer.java
+++ b/components/camel-ganglia/src/main/java/org/apache/camel/component/ganglia/GangliaProducer.java
@@ -22,9 +22,13 @@ import info.ganglia.gmetric4j.gmetric.GMetricType;
 import org.apache.camel.Exchange;
 import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class GangliaProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GangliaProducer.class);
+
     private final Publisher publisher;
     private final GangliaEndpoint gangliaEndpoint;
 
@@ -82,15 +86,15 @@ public class GangliaProducer extends DefaultProducer {
         String value = message.getBody(String.class);
         if ((value == null || value.length() == 0)
             && (type == GMetricType.FLOAT || type == GMetricType.DOUBLE)) {
-            log.debug("Metric {} string value was null, using NaN", metricName);
+            LOG.debug("Metric {} string value was null, using NaN", metricName);
             value = "NaN";
         }
 
-        if (log.isDebugEnabled()) {
-            log.debug("Sending metric {} to Ganglia: {}", metricName, value);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Sending metric {} to Ganglia: {}", metricName, value);
         }
         publisher.publish(groupName,
             metricName, value, type, slope, tmax, dmax, units);
-        log.trace("Sending metric done");
+        LOG.trace("Sending metric done");
     }
 }
diff --git a/components/camel-git/src/main/java/org/apache/camel/component/git/consumer/AbstractGitConsumer.java b/components/camel-git/src/main/java/org/apache/camel/component/git/consumer/AbstractGitConsumer.java
index 303d5e4..0fb735e 100644
--- a/components/camel-git/src/main/java/org/apache/camel/component/git/consumer/AbstractGitConsumer.java
+++ b/components/camel-git/src/main/java/org/apache/camel/component/git/consumer/AbstractGitConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.support.ScheduledPollConsumer;
 import org.eclipse.jgit.api.Git;
 import org.eclipse.jgit.lib.Repository;
 import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class AbstractGitConsumer extends ScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractGitConsumer.class);
+
     private final GitEndpoint endpoint;
 
     private Repository repo;
@@ -61,7 +65,7 @@ public abstract class AbstractGitConsumer extends ScheduledPollConsumer {
                     .findGitDir() // scan up the file system tree
                     .build();
         } catch (IOException e) {
-            log.error("There was an error, cannot open {} repository", endpoint.getLocalPath());
+            LOG.error("There was an error, cannot open {} repository", endpoint.getLocalPath());
             throw e;
         }
         return repo;
diff --git a/components/camel-git/src/main/java/org/apache/camel/component/git/producer/GitProducer.java b/components/camel-git/src/main/java/org/apache/camel/component/git/producer/GitProducer.java
index cc77803..3d5d5b9 100644
--- a/components/camel-git/src/main/java/org/apache/camel/component/git/producer/GitProducer.java
+++ b/components/camel-git/src/main/java/org/apache/camel/component/git/producer/GitProducer.java
@@ -47,9 +47,13 @@ import org.eclipse.jgit.transport.PushResult;
 import org.eclipse.jgit.transport.RemoteConfig;
 import org.eclipse.jgit.transport.URIish;
 import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class GitProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GitProducer.class);
+
     private final GitEndpoint endpoint;
 
     private Repository repo;
@@ -208,7 +212,7 @@ public class GitProducer extends DefaultProducer {
                 throw new IllegalArgumentException("The local repository directory already exists");
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         } finally {
             if (ObjectHelper.isNotEmpty(result)) {
@@ -228,7 +232,7 @@ public class GitProducer extends DefaultProducer {
                 git.checkout().setCreateBranch(true).setName(endpoint.getBranchName()).setStartPoint(endpoint.getTagName()).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -241,7 +245,7 @@ public class GitProducer extends DefaultProducer {
         try {
             result = Git.init().setDirectory(new File(endpoint.getLocalPath(), "")).setBare(false).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         } finally {
             if (ObjectHelper.isNotEmpty(result)) {
@@ -263,7 +267,7 @@ public class GitProducer extends DefaultProducer {
             }
             git.add().addFilepattern(fileName).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -281,7 +285,7 @@ public class GitProducer extends DefaultProducer {
             }
             git.rm().addFilepattern(fileName).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -315,7 +319,7 @@ public class GitProducer extends DefaultProducer {
                 git.commit().setAllowEmpty(allowEmpty).setMessage(commitMessage).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -349,7 +353,7 @@ public class GitProducer extends DefaultProducer {
                 git.commit().setAllowEmpty(allowEmpty).setAll(true).setMessage(commitMessage).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -361,7 +365,7 @@ public class GitProducer extends DefaultProducer {
         try {
             git.branchCreate().setName(endpoint.getBranchName()).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -373,7 +377,7 @@ public class GitProducer extends DefaultProducer {
         try {
             git.branchDelete().setBranchNames(endpoint.getBranchName()).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -386,7 +390,7 @@ public class GitProducer extends DefaultProducer {
             }
             status = git.status().call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, status);
@@ -400,7 +404,7 @@ public class GitProducer extends DefaultProducer {
             }
             revCommit = git.log().call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, revCommit);
@@ -422,7 +426,7 @@ public class GitProducer extends DefaultProducer {
                 result = git.push().setRemote(endpoint.getRemoteName()).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -444,7 +448,7 @@ public class GitProducer extends DefaultProducer {
                 result = git.push().setRemote(endpoint.getRemoteName()).add(Constants.R_TAGS + endpoint.getTagName()).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -466,7 +470,7 @@ public class GitProducer extends DefaultProducer {
                 result = git.pull().setRemote(endpoint.getRemoteName()).call();
             }
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -483,7 +487,7 @@ public class GitProducer extends DefaultProducer {
             git.checkout().setName("master").call();
             result = git.merge().include(mergeBase).setFastForward(FastForwardMode.FF).setCommit(true).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -496,7 +500,7 @@ public class GitProducer extends DefaultProducer {
         try {
             git.tag().setName(endpoint.getTagName()).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -508,7 +512,7 @@ public class GitProducer extends DefaultProducer {
         try {
             git.tagDelete().setTags(endpoint.getTagName()).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
     }
@@ -518,7 +522,7 @@ public class GitProducer extends DefaultProducer {
         try {
             result = git.branchList().setListMode(ListMode.ALL).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -529,7 +533,7 @@ public class GitProducer extends DefaultProducer {
         try {
             result = git.tagList().call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -553,7 +557,7 @@ public class GitProducer extends DefaultProducer {
             }
             result = git.cherryPick().include(commit).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -567,7 +571,7 @@ public class GitProducer extends DefaultProducer {
             }
             result = git.clean().setCleanDirectories(true).call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -578,7 +582,7 @@ public class GitProducer extends DefaultProducer {
         try {
             result = git.gc().call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -598,7 +602,7 @@ public class GitProducer extends DefaultProducer {
             remoteAddCommand.setName(endpoint.getRemoteName());
             result = remoteAddCommand.call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -609,7 +613,7 @@ public class GitProducer extends DefaultProducer {
         try {
             result = git.remoteList().call();
         } catch (Exception e) {
-            log.error("There was an error in Git {} operation", operation);
+            LOG.error("There was an error in Git {} operation", operation);
             throw e;
         }
         updateExchange(exchange, result);
@@ -626,7 +630,7 @@ public class GitProducer extends DefaultProducer {
                 .findGitDir() // scan up the file system tree
                 .build();
         } catch (IOException e) {
-            log.error("There was an error, cannot open {} repository", endpoint.getLocalPath());
+            LOG.error("There was an error, cannot open {} repository", endpoint.getLocalPath());
             throw e;
         }
         return repo;
diff --git a/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/GoogleBigQueryProducer.java b/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/GoogleBigQueryProducer.java
index 2415db1..b98938b 100644
--- a/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/GoogleBigQueryProducer.java
+++ b/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/GoogleBigQueryProducer.java
@@ -27,12 +27,16 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
 import com.google.api.services.bigquery.model.TableRow;
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Generic BigQuery Producer
  */
 public class GoogleBigQueryProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GoogleBigQueryProducer.class);
+
     private final GoogleBigQueryConfiguration configuration;
     private Bigquery bigquery;
 
@@ -110,7 +114,7 @@ public class GoogleBigQueryProducer extends DefaultProducer {
         }
 
         if (totalProcessed == 0) {
-            log.debug("The incoming message is either null or empty for exchange {}", exchange.getExchangeId());
+            LOG.debug("The incoming message is either null or empty for exchange {}", exchange.getExchangeId());
         }
     }
 
@@ -149,8 +153,8 @@ public class GoogleBigQueryProducer extends DefaultProducer {
             apiRequest.set("template_suffix", suffix);
         }
 
-        if (log.isTraceEnabled()) {
-            log.trace("Sending {} messages to bigquery table {}, suffix {}, partition {}",
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Sending {} messages to bigquery table {}, suffix {}, partition {}",
                     apiRequestRows.size(), tableId, suffix, partitionDecorator);
         }
 
@@ -160,12 +164,12 @@ public class GoogleBigQueryProducer extends DefaultProducer {
             throw new Exception("InsertAll into " + tableId + " failed: " + apiResponse.getInsertErrors());
         }
 
-        if (log.isTraceEnabled()) {
-            log.trace("Sent {} messages to bigquery table {}, suffix {}, partition {}",
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Sent {} messages to bigquery table {}, suffix {}, partition {}",
                 apiRequestRows.size(), tableId, suffix, partitionDecorator);
         }
-        if (log.isDebugEnabled()) {
-            log.debug("uploader thread/id: {} / {} . api call completed.", Thread.currentThread().getId(), exchangeId);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("uploader thread/id: {} / {} . api call completed.", Thread.currentThread().getId(), exchangeId);
         }
         return apiRequestData.size();
     }
diff --git a/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/sql/GoogleBigQuerySQLProducer.java b/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/sql/GoogleBigQuerySQLProducer.java
index 723e823..e209b35 100644
--- a/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/sql/GoogleBigQuerySQLProducer.java
+++ b/components/camel-google-bigquery/src/main/java/org/apache/camel/component/google/bigquery/sql/GoogleBigQuerySQLProducer.java
@@ -33,12 +33,16 @@ import org.apache.camel.Message;
 import org.apache.camel.RuntimeExchangeException;
 import org.apache.camel.component.google.bigquery.GoogleBigQueryConstants;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Generic BigQuery Producer
  */
 public class GoogleBigQuerySQLProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GoogleBigQuerySQLProducer.class);
+
     private final GoogleBigQuerySQLConfiguration configuration;
     private Bigquery bigquery;
     private String query;
@@ -69,7 +73,7 @@ public class GoogleBigQuerySQLProducer extends DefaultProducer {
         Map<String, Object> queryParameters = extractParameters(exchange);
         exchange.getMessage().setHeader(GoogleBigQueryConstants.TRANSLATED_QUERY, translatedQuery);
         Long affectedRows = executeSQL(translatedQuery, queryParameters);
-        log.debug("The query {} affected {} rows", query, affectedRows);
+        LOG.debug("The query {} affected {} rows", query, affectedRows);
         exchange.getMessage().setBody(affectedRows);
     }
 
@@ -80,8 +84,8 @@ public class GoogleBigQuerySQLProducer extends DefaultProducer {
 
         setQueryParameters(queryParameters, apiQueryRequest);
 
-        if (log.isTraceEnabled()) {
-            log.trace("Sending query to bigquery standard sql: {}", translatedQuery);
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Sending query to bigquery standard sql: {}", translatedQuery);
         }
 
         QueryResponse apiResponse = apiQuery.execute();
@@ -90,8 +94,8 @@ public class GoogleBigQuerySQLProducer extends DefaultProducer {
             throw new Exception("Query " + translatedQuery + " failed: " + apiResponse.getErrors());
         }
 
-        if (log.isTraceEnabled()) {
-            log.trace("Result of query {} is {}", translatedQuery, apiResponse.toPrettyString());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Result of query {} is {}", translatedQuery, apiResponse.toPrettyString());
         }
         return apiResponse.getNumDmlAffectedRows();
     }
diff --git a/components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/stream/GoogleCalendarStreamConsumer.java b/components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/stream/GoogleCalendarStreamConsumer.java
index 6434323..27f24dd 100644
--- a/components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/stream/GoogleCalendarStreamConsumer.java
+++ b/components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/stream/GoogleCalendarStreamConsumer.java
@@ -126,11 +126,8 @@ public class GoogleCalendarStreamConsumer extends ScheduledBatchPollingConsumer
             // update pending number of exchanges
             pendingExchanges = total - index - 1;
 
-            getAsyncProcessor().process(exchange, new AsyncCallback() {
-                @Override
-                public void done(boolean doneSync) {
-                    log.trace("Processing exchange done");
-                }
+            getAsyncProcessor().process(exchange, doneSync -> {
+                // noop
             });
         }
         return total;
diff --git a/components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraConsumer.java b/components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraConsumer.java
index be0101b..6c51052 100644
--- a/components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraConsumer.java
+++ b/components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraConsumer.java
@@ -88,8 +88,6 @@ public class GoraConsumer extends ScheduledPollConsumer {
         //proceed with query
         final Result result = query.execute();
 
-        log.trace("Processing exchange [{}]...", exchange);
-
         try {
             getProcessor().process(exchange);
         } finally {
diff --git a/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcConsumer.java b/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcConsumer.java
index 29f88f2..96e2c63 100644
--- a/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcConsumer.java
+++ b/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcConsumer.java
@@ -41,12 +41,16 @@ import org.apache.camel.spi.ClassResolver;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.camel.support.ResourceHelper;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents gRPC server consumer implementation
  */
 public class GrpcConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GrpcConsumer.class);
+
     protected final GrpcConfiguration configuration;
     protected final GrpcEndpoint endpoint;
 
@@ -66,17 +70,17 @@ public class GrpcConsumer extends DefaultConsumer {
     protected void doStart() throws Exception {
         super.doStart();
         if (server == null) {
-            log.info("Starting the gRPC server");
+            LOG.info("Starting the gRPC server");
             initializeServer();
             server.start();
-            log.info("gRPC server started and listening on port: {}", server.getPort());
+            LOG.info("gRPC server started and listening on port: {}", server.getPort());
         }
     }
 
     @Override
     protected void doStop() throws Exception {
         if (server != null) {
-            log.debug("Terminating gRPC server");
+            LOG.debug("Terminating gRPC server");
             server.shutdown().shutdownNow();
             server = null;
         }
@@ -98,7 +102,7 @@ public class GrpcConsumer extends DefaultConsumer {
         }
 
         if (!ObjectHelper.isEmpty(configuration.getHost()) && !ObjectHelper.isEmpty(configuration.getPort())) {
-            log.debug("Building gRPC server on {}:{}", configuration.getHost(), configuration.getPort());
+            LOG.debug("Building gRPC server on {}:{}", configuration.getHost(), configuration.getPort());
             serverBuilder = NettyServerBuilder.forAddress(new InetSocketAddress(configuration.getHost(), configuration.getPort()));
         } else {
             throw new IllegalArgumentException("No server start properties (host, port) specified");
@@ -170,7 +174,7 @@ public class GrpcConsumer extends DefaultConsumer {
             });
             return false;
         } else {
-            log.warn("Consumer not ready to process exchanges. The exchange {} will be discarded", exchange);
+            LOG.warn("Consumer not ready to process exchanges. The exchange {} will be discarded", exchange);
             callback.done(true);
             return true;
         }
diff --git a/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcProducer.java b/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcProducer.java
index 3e99b9c..0b8f9bc 100644
--- a/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcProducer.java
+++ b/components/camel-grpc/src/main/java/org/apache/camel/component/grpc/GrpcProducer.java
@@ -40,12 +40,16 @@ import org.apache.camel.support.DefaultAsyncProducer;
 import org.apache.camel.support.ResourceHelper;
 import org.apache.camel.support.service.ServiceHelper;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents asynchronous and synchronous gRPC producer implementations.
  */
 public class GrpcProducer extends DefaultAsyncProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GrpcProducer.class);
+
     protected final GrpcConfiguration configuration;
     protected final GrpcEndpoint endpoint;
     private ManagedChannel channel;
@@ -107,10 +111,10 @@ public class GrpcProducer extends DefaultAsyncProducer {
             }
             
             if (endpoint.isSynchronous()) {
-                log.debug("Getting synchronous method stub from channel");
+                LOG.debug("Getting synchronous method stub from channel");
                 grpcStub = GrpcUtils.constructGrpcBlockingStub(endpoint.getServicePackage(), endpoint.getServiceName(), channel, callCreds, endpoint.getCamelContext());
             } else {
-                log.debug("Getting asynchronous method stub from channel");
+                LOG.debug("Getting asynchronous method stub from channel");
                 grpcStub = GrpcUtils.constructGrpcAsyncStub(endpoint.getServicePackage(), endpoint.getServiceName(), channel, callCreds, endpoint.getCamelContext());
             }
             forwarder = GrpcExchangeForwarderFactory.createExchangeForwarder(configuration, grpcStub);
@@ -134,7 +138,7 @@ public class GrpcProducer extends DefaultAsyncProducer {
             forwarder.shutdown();
             forwarder = null;
 
-            log.debug("Terminating channel to the remote gRPC server");
+            LOG.debug("Terminating channel to the remote gRPC server");
             channel.shutdown().shutdownNow();
             channel = null;
             grpcStub = null;
@@ -147,7 +151,7 @@ public class GrpcProducer extends DefaultAsyncProducer {
         NettyChannelBuilder channelBuilder;
         
         if (!ObjectHelper.isEmpty(configuration.getHost()) && !ObjectHelper.isEmpty(configuration.getPort())) {
-            log.info("Creating channel to the remote gRPC server {}:{}", configuration.getHost(), configuration.getPort());
+            LOG.info("Creating channel to the remote gRPC server {}:{}", configuration.getHost(), configuration.getPort());
             channelBuilder = NettyChannelBuilder.forAddress(configuration.getHost(), configuration.getPort());
         } else {
             throw new IllegalArgumentException("No connection properties (host or port) specified");
diff --git a/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusConsumer.java b/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusConsumer.java
index 7bbb97e..f93b64f 100644
--- a/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusConsumer.java
+++ b/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusConsumer.java
@@ -33,6 +33,8 @@ import org.slf4j.LoggerFactory;
  */
 public class GuavaEventBusConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GuavaEventBusConsumer.class);
+
     private final EventBus eventBus;
     private final Object eventHandler;
 
@@ -54,13 +56,13 @@ public class GuavaEventBusConsumer extends DefaultConsumer {
     @Override
     protected void doStart() throws Exception {
         super.doStart();
-        log.debug("Registering event handler: {} to EventBus: {}", eventHandler, eventBus);
+        LOG.debug("Registering event handler: {} to EventBus: {}", eventHandler, eventBus);
         eventBus.register(eventHandler);
     }
 
     @Override
     protected void doStop() throws Exception {
-        log.debug("Unregistering event handler: {} from EventBus: {}", eventHandler, eventBus);
+        LOG.debug("Unregistering event handler: {} from EventBus: {}", eventHandler, eventBus);
         eventBus.unregister(eventHandler);
         super.doStop();
     }
diff --git a/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusProducer.java b/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusProducer.java
index 9c76389..1c4f083 100644
--- a/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusProducer.java
+++ b/components/camel-guava-eventbus/src/main/java/org/apache/camel/component/guava/eventbus/GuavaEventBusProducer.java
@@ -20,6 +20,8 @@ import com.google.common.eventbus.EventBus;
 import org.apache.camel.Endpoint;
 import org.apache.camel.Exchange;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Guava EventBus (http://docs.guava-libraries.googlecode.com/git/javadoc/com/google/common/eventbus/EventBus.html)
@@ -27,6 +29,8 @@ import org.apache.camel.support.DefaultProducer;
  */
 public class GuavaEventBusProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(GuavaEventBusProducer.class);
+
     private final EventBus eventBus;
 
     public GuavaEventBusProducer(Endpoint endpoint, EventBus eventBus) {
@@ -38,10 +42,10 @@ public class GuavaEventBusProducer extends DefaultProducer {
     public void process(Exchange exchange) throws Exception {
         Object body = exchange.getIn().getBody();
         if (body != null) {
-            log.debug("Posting: {} to EventBus: {}", body, eventBus);
+            LOG.debug("Posting: {} to EventBus: {}", body, eventBus);
             eventBus.post(body);
         } else {
-            log.debug("Body is null, cannot post to EventBus");
+            LOG.debug("Body is null, cannot post to EventBus");
         }
     }
 
diff --git a/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/queue/HazelcastQueueConsumer.java b/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/queue/HazelcastQueueConsumer.java
index d0a9e34..256df91 100644
--- a/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/queue/HazelcastQueueConsumer.java
+++ b/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/queue/HazelcastQueueConsumer.java
@@ -91,10 +91,7 @@ public class HazelcastQueueConsumer extends HazelcastDefaultConsumer {
                             getExceptionHandler().handleException("Error during processing", exchange, e);
                         }
                     } catch (InterruptedException e) {
-                        if (log.isDebugEnabled()) {
-                            log.debug("Hazelcast Queue Consumer Interrupted: {}", e, e);
-                            continue;
-                        }
+                        // ignore
                     }
                 }
             }
diff --git a/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/seda/HazelcastSedaConsumer.java b/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/seda/HazelcastSedaConsumer.java
index 070dcb0..dbb00b2 100644
--- a/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/seda/HazelcastSedaConsumer.java
+++ b/components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/seda/HazelcastSedaConsumer.java
@@ -30,12 +30,16 @@ import org.apache.camel.Processor;
 import org.apache.camel.support.AsyncProcessorConverterHelper;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.camel.support.DefaultExchangeHolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implementation of Hazelcast SEDA {@link Consumer} component.
  */
 public class HazelcastSedaConsumer extends DefaultConsumer implements Runnable {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HazelcastSedaConsumer.class);
+
     private final HazelcastSedaEndpoint endpoint;
     private final AsyncProcessor processor;
     private ExecutorService executor;
@@ -80,7 +84,7 @@ public class HazelcastSedaConsumer extends DefaultConsumer implements Runnable {
                     transactionCtx = endpoint.getHazelcastInstance().newTransactionContext();
 
                     if (transactionCtx != null) {
-                        log.trace("Begin transaction: {}", transactionCtx.getTxnId());
+                        LOG.trace("Begin transaction: {}", transactionCtx.getTxnId());
                         transactionCtx.beginTransaction();
                         queue = transactionCtx.getQueue(endpoint.getConfiguration().getQueueName());
                     }
@@ -111,28 +115,28 @@ public class HazelcastSedaConsumer extends DefaultConsumer implements Runnable {
                         }
 
                     } catch (Exception e) {
-                        log.error("Hzlq Exception caught: {}", e, e);
+                        LOG.error("Hzlq Exception caught: {}", e, e);
                         // Rollback
                         if (transactionCtx != null) {
-                            log.trace("Rollback transaction: {}", transactionCtx.getTxnId());
+                            LOG.trace("Rollback transaction: {}", transactionCtx.getTxnId());
                             transactionCtx.rollbackTransaction();
                         }
                     }
                 }
                 // It's OK, I commit
                 if (exchange.getException() == null && transactionCtx != null) {
-                    log.trace("Commit transaction: {}", transactionCtx.getTxnId());
+                    LOG.trace("Commit transaction: {}", transactionCtx.getTxnId());
                     transactionCtx.commitTransaction();
                 }
             } catch (InterruptedException e) {
-                if (log.isDebugEnabled()) {
-                    log.debug("Hzlq Consumer Interrupted: {}", e, e);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Hzlq Consumer Interrupted: {}", e, e);
                 }
                 continue;
             } catch (Throwable e) {
                 // Rollback
                 if (transactionCtx != null) {
-                    log.trace("Rollback transaction: {}", transactionCtx.getTxnId());
+                    LOG.trace("Rollback transaction: {}", transactionCtx.getTxnId());
                     try {
                         transactionCtx.rollbackTransaction();
                     } catch (Throwable ignore) {
diff --git a/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/HBaseConsumer.java b/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/HBaseConsumer.java
index e5f29b2..aafb896 100644
--- a/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/HBaseConsumer.java
+++ b/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/HBaseConsumer.java
@@ -41,12 +41,16 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.PageFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The HBase consumer.
  */
 public class HBaseConsumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HBaseConsumer.class);
+
     private final HBaseEndpoint endpoint;
     private HBaseRow rowModel;
 
@@ -150,7 +154,7 @@ public class HBaseConsumer extends ScheduledBatchPollingConsumer {
 
         // limit if needed
         if (maxMessagesPerPoll > 0 && total > maxMessagesPerPoll) {
-            log.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.", maxMessagesPerPoll, total);
+            LOG.debug("Limiting to maximum messages to poll {} as there were {} messages in this poll.", maxMessagesPerPoll, total);
             total = maxMessagesPerPoll;
         }
 
@@ -165,7 +169,7 @@ public class HBaseConsumer extends ScheduledBatchPollingConsumer {
             // update pending number of exchanges
             pendingExchanges = total - index - 1;
 
-            log.trace("Processing exchange [{}]...", exchange);
+            LOG.trace("Processing exchange [{}]...", exchange);
             getProcessor().process(exchange);
             if (exchange.getException() != null) {
                 // if we failed then throw exception
diff --git a/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/processor/idempotent/HBaseIdempotentRepository.java b/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/processor/idempotent/HBaseIdempotentRepository.java
index e90dfee..03f8333 100644
--- a/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/processor/idempotent/HBaseIdempotentRepository.java
+++ b/components/camel-hbase/src/main/java/org/apache/camel/component/hbase/processor/idempotent/HBaseIdempotentRepository.java
@@ -32,9 +32,13 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HBaseIdempotentRepository extends ServiceSupport implements IdempotentRepository {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HBaseIdempotentRepository.class);
+
     private final String tableName;
     private final String family;
     private final String qualifier;
@@ -63,7 +67,7 @@ public class HBaseIdempotentRepository extends ServiceSupport implements Idempot
             table.put(put);
             return true;
         } catch (Exception e) {
-            log.warn("Error adding object {} to HBase repository.", o);
+            LOG.warn("Error adding object {} to HBase repository.", o);
             return false;
         }
     }
@@ -76,7 +80,7 @@ public class HBaseIdempotentRepository extends ServiceSupport implements Idempot
             get.addColumn(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(qualifier));
             return table.exists(get);
         } catch (Exception e) {
-            log.warn("Error reading object {} from HBase repository.", o);
+            LOG.warn("Error reading object {} from HBase repository.", o);
             return false;
         }
     }
@@ -93,7 +97,7 @@ public class HBaseIdempotentRepository extends ServiceSupport implements Idempot
                 return false;
             }
         } catch (Exception e) {
-            log.warn("Error removing object {} from HBase repository.", o);
+            LOG.warn("Error removing object {} from HBase repository.", o);
             return false;
         }
     }
@@ -114,7 +118,7 @@ public class HBaseIdempotentRepository extends ServiceSupport implements Idempot
                 table.delete(d);
             } 
         } catch (Exception e) {
-            log.warn("Error clear HBase repository {}", table);
+            LOG.warn("Error clear HBase repository {}", table);
         }
     }    
 
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
index 34f2fb3..df22168 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsConsumer.java
@@ -39,9 +39,13 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class HdfsConsumer extends ScheduledPollConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HdfsConsumer.class);
+
     private final HdfsConfiguration endpointConfig;
     private final StringBuilder hdfsPath;
     private final Processor processor;
@@ -80,18 +84,18 @@ public final class HdfsConsumer extends ScheduledPollConsumer {
         String hdfsFsDescription = endpointConfig.getFileSystemLabel(hdfsPath.toString());
         // if we are starting up then log at info level, and if runtime then log at debug level to not flood the log
         if (onStartup) {
-            log.info("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
+            LOG.info("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
         } else {
-            log.debug("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
+            LOG.debug("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
         }
 
         // hadoop will cache the connection by default so its faster to get in the poll method
         HdfsInfo answer = hdfsInfoFactory.newHdfsInfo(this.hdfsPath.toString());
 
         if (onStartup) {
-            log.info("Connected to hdfs file-system {}", hdfsFsDescription);
+            LOG.info("Connected to hdfs file-system {}", hdfsFsDescription);
         } else {
-            log.debug("Connected to hdfs file-system {}", hdfsFsDescription);
+            LOG.debug("Connected to hdfs file-system {}", hdfsFsDescription);
         }
         return answer;
     }
@@ -140,16 +144,16 @@ public final class HdfsConsumer extends ScheduledPollConsumer {
                 .filter(Objects::nonNull)
                 .collect(Collectors.toList());
 
-        log.info("Processing [{}] valid files out of [{}] available.", hdfsFiles.size(), fileStatuses.length);
+        LOG.info("Processing [{}] valid files out of [{}] available.", hdfsFiles.size(), fileStatuses.length);
 
         for (int i = 0; i < hdfsFiles.size(); i++) {
             HdfsInputStream hdfsFile = hdfsFiles.get(i);
             try {
                 int messageCount = processHdfsInputStream(hdfsFile, totalMessageCount);
-                log.debug("Processed [{}] files out of [{}].", i, hdfsFiles.size());
-                log.debug("File [{}] was split to [{}] messages.", i, messageCount);
+                LOG.debug("Processed [{}] files out of [{}].", i, hdfsFiles.size());
+                LOG.debug("File [{}] was split to [{}] messages.", i, messageCount);
             } finally {
-                IOHelper.close(hdfsFile, "hdfs file", log);
+                IOHelper.close(hdfsFile, "hdfs file", LOG);
             }
         }
 
@@ -188,7 +192,7 @@ public final class HdfsConsumer extends ScheduledPollConsumer {
 
         updateNewExchange(exchange, messageCount.get(), hdfsFile);
 
-        log.debug("Processing file [{}]", fileName);
+        LOG.debug("Processing file [{}]", fileName);
         try {
             processor.process(exchange);
             totalMessageCount.incrementAndGet();
@@ -218,8 +222,8 @@ public final class HdfsConsumer extends ScheduledPollConsumer {
 
     private boolean hasMatchingOwner(FileStatus fileStatus) {
         if (endpointConfig.getOwner() != null && !endpointConfig.getOwner().equals(fileStatus.getOwner())) {
-            if (log.isDebugEnabled()) {
-                log.debug("Skipping file: {} as not matching owner: {}", fileStatus.getPath(), endpointConfig.getOwner());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Skipping file: {} as not matching owner: {}", fileStatus.getPath(), endpointConfig.getOwner());
             }
             return false;
         }
diff --git a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
index 0877eef..ecfe3d1 100644
--- a/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
+++ b/components/camel-hdfs/src/main/java/org/apache/camel/component/hdfs/HdfsProducer.java
@@ -31,9 +31,13 @@ import org.apache.camel.RuntimeCamelException;
 import org.apache.camel.support.DefaultProducer;
 import org.apache.camel.util.IOHelper;
 import org.apache.camel.util.StringHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HdfsProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HdfsProducer.class);
+
     private final HdfsConfiguration config;
     private final StringBuilder hdfsPath;
     private final AtomicBoolean idle = new AtomicBoolean(false);
@@ -109,12 +113,12 @@ public class HdfsProducer extends DefaultProducer {
             Optional<SplitStrategy> idleStrategy = tryFindIdleStrategy(config.getSplitStrategies());
             if (idleStrategy.isPresent()) {
                 scheduler = getEndpoint().getCamelContext().getExecutorServiceManager().newSingleThreadScheduledExecutor(this, "HdfsIdleCheck");
-                log.debug("Creating IdleCheck task scheduled to run every {} millis", config.getCheckIdleInterval());
+                LOG.debug("Creating IdleCheck task scheduled to run every {} millis", config.getCheckIdleInterval());
                 scheduler.scheduleAtFixedRate(new IdleCheck(idleStrategy.get()), config.getCheckIdleInterval(), config.getCheckIdleInterval(), TimeUnit.MILLISECONDS);
             }
         } catch (Exception e) {
-            log.warn("Failed to start the HDFS producer. Caused by: [{}]", e.getMessage());
-            log.debug("", e);
+            LOG.warn("Failed to start the HDFS producer. Caused by: [{}]", e.getMessage());
+            LOG.debug("", e);
             throw new RuntimeCamelException(e);
         } finally {
             HdfsComponent.setJAASConfiguration(auth);
@@ -135,18 +139,18 @@ public class HdfsProducer extends DefaultProducer {
 
         // if we are starting up then log at info level, and if runtime then log at debug level to not flood the log
         if (onStartup) {
-            log.info("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
+            LOG.info("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
         } else {
-            log.debug("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
+            LOG.debug("Connecting to hdfs file-system {} (may take a while if connection is not available)", hdfsFsDescription);
         }
 
         HdfsInfoFactory hdfsInfoFactory = new HdfsInfoFactory(config);
         HdfsOutputStream answer = HdfsOutputStream.createOutputStream(actualPath.toString(), hdfsInfoFactory);
 
         if (onStartup) {
-            log.info("Connected to hdfs file-system {}", hdfsFsDescription);
+            LOG.info("Connected to hdfs file-system {}", hdfsFsDescription);
         } else {
-            log.debug("Connected to hdfs file-system {}", hdfsFsDescription);
+            LOG.debug("Connected to hdfs file-system {}", hdfsFsDescription);
         }
 
         return answer;
@@ -169,7 +173,7 @@ public class HdfsProducer extends DefaultProducer {
             scheduler = null;
         }
         if (oStream != null) {
-            IOHelper.close(oStream, "output stream", log);
+            IOHelper.close(oStream, "output stream", LOG);
             oStream = null;
         }
     }
@@ -193,7 +197,7 @@ public class HdfsProducer extends DefaultProducer {
         // if an explicit filename is specified, close any existing stream and append the filename to the hdfsPath
         if (exchange.getIn().getHeader(Exchange.FILE_NAME) != null) {
             if (oStream != null) {
-                IOHelper.close(oStream, "output stream", log);
+                IOHelper.close(oStream, "output stream", LOG);
             }
             StringBuilder actualPath = getHdfsPathUsingFileNameHeader(exchange);
             oStream = HdfsOutputStream.createOutputStream(actualPath.toString(), hdfsInfoFactory);
@@ -204,14 +208,14 @@ public class HdfsProducer extends DefaultProducer {
 
         if (isSplitRequired(config.getSplitStrategies())) {
             if (oStream != null) {
-                IOHelper.close(oStream, "output stream", log);
+                IOHelper.close(oStream, "output stream", LOG);
             }
             StringBuilder actualPath = newFileName();
             oStream = HdfsOutputStream.createOutputStream(actualPath.toString(), hdfsInfoFactory);
         }
 
         String path = oStream.getActualPath();
-        log.trace("Writing body to hdfs-file {}", path);
+        LOG.trace("Writing body to hdfs-file {}", path);
         oStream.append(key, body, exchange);
 
         idle.set(false);
@@ -227,7 +231,7 @@ public class HdfsProducer extends DefaultProducer {
         // if no idle checker then we need to explicit close the stream after usage
         if (close) {
             try {
-                HdfsProducer.this.log.trace("Closing stream");
+                LOG.trace("Closing stream");
                 oStream.close();
                 oStream = null;
             } catch (IOException e) {
@@ -235,14 +239,11 @@ public class HdfsProducer extends DefaultProducer {
             }
         }
 
-        log.debug("Wrote body to hdfs-file {}", path);
+        LOG.debug("Wrote body to hdfs-file {}", path);
     }
 
     /**
      * helper method to construct the hdfsPath from the CamelFileName String or Expression
-     *
-     * @param exchange
-     * @return
      */
     private StringBuilder getHdfsPathUsingFileNameHeader(Exchange exchange) {
         StringBuilder actualPath = new StringBuilder(hdfsPath);
@@ -288,12 +289,12 @@ public class HdfsProducer extends DefaultProducer {
                 return;
             }
 
-            HdfsProducer.this.log.trace("IdleCheck running");
+            LOG.trace("IdleCheck running");
 
             if (System.currentTimeMillis() - oStream.getLastAccess() > strategy.value && !idle.get() && !oStream.isBusy().get()) {
                 idle.set(true);
                 try {
-                    HdfsProducer.this.log.trace("Closing stream as idle");
+                    LOG.trace("Closing stream as idle");
                     oStream.close();
                 } catch (IOException e) {
                     // ignore
diff --git a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatComponent.java b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatComponent.java
index 69392e0..c2e5772 100644
--- a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatComponent.java
+++ b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatComponent.java
@@ -24,6 +24,8 @@ import org.apache.camel.Endpoint;
 import org.apache.camel.spi.annotations.Component;
 import org.apache.camel.support.DefaultComponent;
 import org.apache.camel.util.URISupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents the component that manages {@link HipchatEndpoint}. Hipchat is an Atlassian software for team chat.
@@ -36,6 +38,8 @@ import org.apache.camel.util.URISupport;
 @Component("hipchat")
 public class HipchatComponent extends DefaultComponent {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HipchatComponent.class);
+
     public HipchatComponent() {
     }
 
@@ -51,7 +55,7 @@ public class HipchatComponent extends DefaultComponent {
             throw new HipchatException("OAuth 2 auth token must be specified");
         }
         parseUri(remaining, endpoint);
-        log.debug("Using Hipchat API URL: {}", endpoint.getConfiguration().hipChatUrl());
+        LOG.debug("Using Hipchat API URL: {}", endpoint.getConfiguration().hipChatUrl());
         return endpoint;
     }
 
diff --git a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatConsumer.java b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatConsumer.java
index 9df0183..b7e90df 100644
--- a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatConsumer.java
+++ b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatConsumer.java
@@ -31,12 +31,17 @@ import org.apache.camel.support.ScheduledPollConsumer;
 import org.apache.camel.util.URISupport;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Hipchat consumer consumes messages from a list of users.
  */
 public class HipchatConsumer extends ScheduledPollConsumer {
     public static final long DEFAULT_CONSUMER_DELAY = 5 * 1000;
+
+    private static final Logger LOG = LoggerFactory.getLogger(HipchatConsumer.class);
+
     private static final MapType MAP_TYPE = TypeFactory.defaultInstance().constructMapType(Map.class, String.class, Object.class);
     private static final ObjectMapper MAPPER = new ObjectMapper();
     
@@ -59,7 +64,7 @@ public class HipchatConsumer extends ScheduledPollConsumer {
 
     private void processExchangeForUser(String user, Exchange exchange) throws Exception {
         String urlPath = String.format(getMostRecentMessageUrl(), user);
-        log.debug("Polling HipChat Api " + urlPath + " for new messages at " + Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
+        LOG.debug("Polling HipChat Api " + urlPath + " for new messages at " + Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
         HttpGet httpGet = new HttpGet(getConfig().hipChatUrl() + urlPath);
         CloseableHttpResponse response = executeGet(httpGet);
         exchange.getIn().setHeader(HipchatConstants.FROM_USER, user);
@@ -69,7 +74,7 @@ public class HipchatConsumer extends ScheduledPollConsumer {
     private void processApiResponse(Exchange exchange, CloseableHttpResponse response) throws Exception {
         try {
             Map<String, Object> jsonMap = MAPPER.readValue(response.getEntity().getContent(), MAP_TYPE);
-            log.debug("Hipchat response " + response + ", json: " + MAPPER.writeValueAsString(jsonMap));
+            LOG.debug("Hipchat response " + response + ", json: " + MAPPER.writeValueAsString(jsonMap));
             if (jsonMap != null && jsonMap.size() > 0) {
                 List<Map<String, Object>> items = (List<Map<String, Object>>) jsonMap.get(HipchatApiConstants.API_ITEMS);
                 if (items != null && items.size() > 0) {
@@ -77,7 +82,7 @@ public class HipchatConsumer extends ScheduledPollConsumer {
                         Map<String, Object> item = items.get(0);
                         String date = (String) item.get(HipchatApiConstants.API_DATE);
                         String message = (String) item.get(HipchatApiConstants.API_MESSAGE);
-                        log.debug("Setting exchange body: " + message + ", header " + HipchatConstants.MESSAGE_DATE + ": " + date);
+                        LOG.debug("Setting exchange body: " + message + ", header " + HipchatConstants.MESSAGE_DATE + ": " + date);
                         exchange.getIn().setHeader(HipchatConstants.FROM_USER_RESPONSE_STATUS, response.getStatusLine());
                         exchange.getIn().setHeader(HipchatConstants.MESSAGE_DATE, date);
                         exchange.getIn().setBody(message);
diff --git a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatProducer.java b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatProducer.java
index 64057d7..d2140e8 100644
--- a/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatProducer.java
+++ b/components/camel-hipchat/src/main/java/org/apache/camel/component/hipchat/HipchatProducer.java
@@ -31,6 +31,9 @@ import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.camel.util.UnsafeUriCharactersEncoder.encodeHttpURI;
 
@@ -39,6 +42,8 @@ import static org.apache.camel.util.UnsafeUriCharactersEncoder.encodeHttpURI;
  */
 public class HipchatProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HipchatProducer.class);
+
     private static final ObjectMapper MAPPER = new ObjectMapper();
     
     private transient String hipchatProducerToString;
@@ -67,18 +72,18 @@ public class HipchatProducer extends DefaultProducer {
         if (backGroundColor != null) {
             jsonParam.put(HipchatApiConstants.API_MESSAGE_COLOR, backGroundColor);
         }
-        log.info("Sending message to room: " + room + ", " + MAPPER.writeValueAsString(jsonParam));
+        LOG.info("Sending message to room: " + room + ", " + MAPPER.writeValueAsString(jsonParam));
         StatusLine statusLine = post(encodeHttpURI(urlPath), jsonParam);
-        log.debug("Response status for send room message: {}", statusLine);
+        LOG.debug("Response status for send room message: {}", statusLine);
         return statusLine;
     }
 
     private StatusLine sendUserMessage(String user, Exchange exchange) throws IOException, InvalidPayloadException {
         String urlPath = String.format(getConfig().withAuthToken(HipchatApiConstants.URI_PATH_USER_MESSAGE), user);
         Map<String, String> jsonParam = getCommonHttpPostParam(exchange);
-        log.info("Sending message to user: " + user + ", " + MAPPER.writeValueAsString(jsonParam));
+        LOG.info("Sending message to user: " + user + ", " + MAPPER.writeValueAsString(jsonParam));
         StatusLine statusLine = post(urlPath, jsonParam);
-        log.debug("Response status for send user message: {}", statusLine);
+        LOG.debug("Response status for send user message: {}", statusLine);
         return statusLine;
     }
 
diff --git a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java
index 3e5f7e4..471d17d 100644
--- a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java
+++ b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java
@@ -66,6 +66,8 @@ import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.ssl.SSLContexts;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Defines the HTTP Component
@@ -74,6 +76,8 @@ import org.apache.http.ssl.SSLContexts;
 @Component("http,https")
 public class HttpComponent extends HttpCommonComponent implements RestProducerFactory, SSLContextParametersAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HttpComponent.class);
+
     @Metadata(label = "advanced", description = "To use the custom HttpClientConfigurer to perform configuration of the HttpClient that will be used.")
     protected HttpClientConfigurer httpClientConfigurer;
     @Metadata(label = "advanced", description = "To use a custom and shared HttpClientConnectionManager to manage connections."
@@ -270,7 +274,7 @@ public class HttpComponent extends HttpCommonComponent implements RestProducerFa
         // create the endpoint and set the http uri to be null
         String endpointUriString = endpointUri.toString();
 
-        log.debug("Creating endpoint uri {}", endpointUriString);
+        LOG.debug("Creating endpoint uri {}", endpointUriString);
         final HttpClientConnectionManager localConnectionManager = createConnectionManager(parameters, sslContextParameters);
         HttpEndpoint endpoint = new HttpEndpoint(endpointUriString, this, clientBuilder, localConnectionManager, configurer);
 
@@ -393,7 +397,7 @@ public class HttpComponent extends HttpCommonComponent implements RestProducerFa
         if (localConnectionsPerRoute > 0) {
             answer.setDefaultMaxPerRoute(localConnectionsPerRoute);
         }
-        log.info("Created ClientConnectionManager {}", answer);
+        LOG.info("Created ClientConnectionManager {}", answer);
 
         return answer;
     }
@@ -652,7 +656,7 @@ public class HttpComponent extends HttpCommonComponent implements RestProducerFa
     public void doStop() throws Exception {
         // shutdown connection manager
         if (clientConnectionManager != null) {
-            log.info("Shutting down ClientConnectionManager: {}", clientConnectionManager);
+            LOG.info("Shutting down ClientConnectionManager: {}", clientConnectionManager);
             clientConnectionManager.shutdown();
             clientConnectionManager = null;
         }
diff --git a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpEndpoint.java b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpEndpoint.java
index 315dd54..86dc5a1 100644
--- a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpEndpoint.java
+++ b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpEndpoint.java
@@ -49,6 +49,8 @@ import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.pool.ConnPoolControl;
 import org.apache.http.pool.PoolStats;
 import org.apache.http.protocol.HttpContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * For calling out to external HTTP servers using Apache HTTP Client 4.x.
@@ -58,6 +60,8 @@ import org.apache.http.protocol.HttpContext;
 @ManagedResource(description = "Managed HttpEndpoint")
 public class HttpEndpoint extends HttpCommonEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HttpEndpoint.class);
+
     @UriParam(label = "security", description = "To configure security using SSLContextParameters."
         + " Important: Only one instance of org.apache.camel.util.jsse.SSLContextParameters is supported per HttpComponent."
         + " If you need to use 2 or more different instances, you need to define a new HttpComponent per instance you need.")
@@ -203,7 +207,7 @@ public class HttpEndpoint extends HttpCommonEndpoint {
                 if (scheme == null) {
                     scheme = HttpHelper.isSecureConnection(getEndpointUri()) ? "https" : "http";
                 }
-                log.debug("CamelContext properties http.proxyHost, http.proxyPort, and http.proxyScheme detected. Using http proxy host: {} port: {} scheme: {}", host, port, scheme);
+                LOG.debug("CamelContext properties http.proxyHost, http.proxyPort, and http.proxyScheme detected. Using http proxy host: {} port: {} scheme: {}", host, port, scheme);
                 HttpHost proxy = new HttpHost(host, port, scheme);
                 clientBuilder.setProxy(proxy);
             }
@@ -226,7 +230,7 @@ public class HttpEndpoint extends HttpCommonEndpoint {
             clientBuilder.setDefaultCookieStore(new NoopCookieStore());
         }
 
-        log.debug("Setup the HttpClientBuilder {}", clientBuilder);
+        LOG.debug("Setup the HttpClientBuilder {}", clientBuilder);
         return clientBuilder.build();
     }
 
diff --git a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpProducer.java b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpProducer.java
index 716be67..500a153 100644
--- a/components/camel-http/src/main/java/org/apache/camel/component/http/HttpProducer.java
+++ b/components/camel-http/src/main/java/org/apache/camel/component/http/HttpProducer.java
@@ -72,11 +72,16 @@ import org.apache.http.protocol.BasicHttpContext;
 import org.apache.http.protocol.HTTP;
 import org.apache.http.protocol.HttpContext;
 import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static org.apache.http.HttpHeaders.HOST;
 
 public class HttpProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HttpProducer.class);
+
     private HttpClient httpClient;
     private HttpContext httpContext;
     private boolean throwException;
@@ -190,12 +195,12 @@ public class HttpProducer extends DefaultProducer {
         // lets store the result in the output message.
         HttpResponse httpResponse = null;
         try {
-            if (log.isDebugEnabled()) {
-                log.debug("Executing http {} method: {}", httpRequest.getMethod(), httpRequest.getURI());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Executing http {} method: {}", httpRequest.getMethod(), httpRequest.getURI());
             }
             httpResponse = executeMethod(httpRequest);
             int responseCode = httpResponse.getStatusLine().getStatusCode();
-            log.debug("Http responseCode: {}", responseCode);
+            LOG.debug("Http responseCode: {}", responseCode);
 
             if (!throwException) {
                 // if we do not use failed exception then populate response for all response codes
@@ -427,7 +432,7 @@ public class HttpProducer extends DefaultProducer {
             }
             throw ex;
         } finally {
-            IOHelper.close(is, "Extracting response body", log);
+            IOHelper.close(is, "Extracting response body", LOG);
         }
     }
 
@@ -459,14 +464,14 @@ public class HttpProducer extends DefaultProducer {
             method = new HttpGetWithBodyMethod(url, requestEntity);
         }
 
-        log.trace("Using URL: {} with method: {}", url, method);
+        LOG.trace("Using URL: {} with method: {}", url, method);
 
         if (methodToUse.isEntityEnclosing()) {
             // only create entity for http payload if the HTTP method carries payload (such as POST)
             HttpEntity requestEntity = createRequestEntity(exchange);
             ((HttpEntityEnclosingRequestBase) method).setEntity(requestEntity);
             if (requestEntity != null && requestEntity.getContentType() == null) {
-                log.debug("No Content-Type provided for URL: {} with exchange: {}", url, exchange);
+                LOG.debug("No Content-Type provided for URL: {} with exchange: {}", url, exchange);
             }
         }
 
diff --git a/components/camel-hystrix/src/main/java/org/apache/camel/component/hystrix/metrics/HystrixEventStreamService.java b/components/camel-hystrix/src/main/java/org/apache/camel/component/hystrix/metrics/HystrixEventStreamService.java
index c1b4aa3..9956e20 100644
--- a/components/camel-hystrix/src/main/java/org/apache/camel/component/hystrix/metrics/HystrixEventStreamService.java
+++ b/components/camel-hystrix/src/main/java/org/apache/camel/component/hystrix/metrics/HystrixEventStreamService.java
@@ -27,6 +27,8 @@ import org.apache.camel.api.management.ManagedOperation;
 import org.apache.camel.api.management.ManagedResource;
 import org.apache.camel.component.hystrix.metrics.servlet.HystrixEventStreamServlet;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * To gather hystrix metrics and offer the metrics over JMX and Java APIs.
@@ -37,6 +39,8 @@ import org.apache.camel.support.service.ServiceSupport;
 @ManagedResource(description = "Managed Hystrix EventStreamService")
 public class HystrixEventStreamService extends ServiceSupport implements StaticService, HystrixMetricsPoller.MetricsAsJsonPollerListener {
 
+    private static final Logger LOG = LoggerFactory.getLogger(HystrixEventStreamService.class);
+
     public static final int METRICS_QUEUE_SIZE = 1000;
 
     private int delay = 500;
@@ -109,7 +113,7 @@ public class HystrixEventStreamService extends ServiceSupport implements StaticS
 
     @Override
     protected void doStart() throws Exception {
-        log.info("Starting HystrixMetricsPoller with delay: {} and queue size: {}", delay, queueSize);
+        LOG.info("Starting HystrixMetricsPoller with delay: {} and queue size: {}", delay, queueSize);
         queue = new LinkedBlockingQueue<>(queueSize);
         poller = new HystrixMetricsPoller(this, delay);
         poller.start();
@@ -118,19 +122,19 @@ public class HystrixEventStreamService extends ServiceSupport implements StaticS
     @Override
     protected void doStop() throws Exception {
         if (poller != null) {
-            log.info("Shutting down HystrixMetricsPoller");
+            LOG.info("Shutting down HystrixMetricsPoller");
             poller.shutdown();
         }
     }
 
     @Override
     public void handleJsonMetric(String json) {
-        log.debug("handleJsonMetric: {}", json);
+        LOG.debug("handleJsonMetric: {}", json);
 
         // ensure there is space on the queue by polling until at least single slot is free
         int drain = queue.size() - queueSize + 1;
         if (drain > 0) {
-            log.debug("Draining queue to make room: {}", drain);
+            LOG.debug("Draining queue to make room: {}", drain);
             for (int i = 0; i < drain; i++) {
                 queue.poll();
             }
diff --git a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/AbstractIecComponent.java b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/AbstractIecComponent.java
index 51954dc..5ccb58b 100644
--- a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/AbstractIecComponent.java
+++ b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/AbstractIecComponent.java
@@ -25,11 +25,16 @@ import org.apache.camel.Endpoint;
 import org.apache.camel.component.iec60870.client.ClientOptions;
 import org.apache.camel.support.DefaultComponent;
 import org.eclipse.neoscada.protocol.iec60870.ProtocolOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 
 import static java.util.Objects.requireNonNull;
 
 public abstract class AbstractIecComponent<T1, T2 extends BaseOptions<T2>> extends DefaultComponent {
 
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractIecComponent.class);
+
     private final Map<ConnectionId, T1> connections = new HashMap<>();
 
     private final Class<T2> connectionOptionsClazz;
@@ -71,7 +76,7 @@ public abstract class AbstractIecComponent<T1, T2 extends BaseOptions<T2>> exten
     @Override
     protected Endpoint createEndpoint(final String uri, final String remaining, final Map<String, Object> parameters) throws Exception {
 
-        log.info("Create endpoint - uri: {}, remaining: {}, parameters: {}", uri, remaining, parameters);
+        LOG.info("Create endpoint - uri: {}, remaining: {}, parameters: {}", uri, remaining, parameters);
 
         final T1 connection = lookupConnection(uri, parameters);
         final ObjectAddress address = parseAddress(uri);
@@ -121,7 +126,7 @@ public abstract class AbstractIecComponent<T1, T2 extends BaseOptions<T2>> exten
 
     private T1 lookupConnection(final String fullUri, final Map<String, Object> parameters) throws Exception {
 
-        log.debug("parse connection - '{}'", fullUri);
+        LOG.debug("parse connection - '{}'", fullUri);
 
         if (fullUri == null || fullUri.isEmpty()) {
             throw new IllegalArgumentException("Invalid URI: " + fullUri);
@@ -129,18 +134,18 @@ public abstract class AbstractIecComponent<T1, T2 extends BaseOptions<T2>> exten
 
         final ConnectionId id = parseConnectionId(fullUri, parameters);
 
-        log.debug("parse connection - fullUri: {} -> {}", fullUri, id);
+        LOG.debug("parse connection - fullUri: {} -> {}", fullUri, id);
 
         synchronized (this) {
-            log.debug("Locating connection - {}", id);
+            LOG.debug("Locating connection - {}", id);
 
             T1 connection = this.connections.get(id);
 
-            log.debug("Result - {} -> {}", id, connection);
+            LOG.debug("Result - {} -> {}", id, connection);
 
             if (connection == null) {
                 final T2 options = parseOptions(id, parameters);
-                log.debug("Creating new connection: {}", options);
+                LOG.debug("Creating new connection: {}", options);
 
                 connection = createConnection(id, options);
                 this.connections.put(id, connection);
diff --git a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientComponent.java b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientComponent.java
index 66fd0e2..acf4c27 100644
--- a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientComponent.java
+++ b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientComponent.java
@@ -52,8 +52,6 @@ public class ClientComponent extends AbstractIecComponent<ClientConnectionMultip
 
     @Override
     protected ClientConnectionMultiplexor createConnection(final ConnectionId id, final ClientOptions options) {
-        log.debug("Create new connection - id: {}", id);
-
         return new ClientConnectionMultiplexor(new ClientConnection(id.getHost(), id.getPort(), options));
     }
 
diff --git a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientConsumer.java b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientConsumer.java
index 70fe201..43a80d2 100644
--- a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientConsumer.java
+++ b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientConsumer.java
@@ -25,9 +25,13 @@ import org.apache.camel.component.iec60870.ObjectAddress;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.camel.support.DefaultMessage;
 import org.eclipse.neoscada.protocol.iec60870.asdu.types.Value;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ClientConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ClientConsumer.class);
+
     private final ClientConnection connection;
     private final ClientEndpoint endpoint;
 
@@ -54,9 +58,9 @@ public class ClientConsumer extends DefaultConsumer {
         try {
             final Exchange exchange = getEndpoint().createExchange();
             exchange.setIn(mapMessage(value));
-            getAsyncProcessor().process(exchange);
+            getProcessor().process(exchange);
         } catch (final Exception e) {
-            log.debug("Failed to process message", e);
+            LOG.debug("Failed to process message", e);
         }
     }
 
diff --git a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerComponent.java b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerComponent.java
index 64274c4..2e0b66a 100644
--- a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerComponent.java
+++ b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerComponent.java
@@ -48,8 +48,6 @@ public class ServerComponent extends AbstractIecComponent<ServerConnectionMultip
 
     @Override
     protected ServerConnectionMultiplexor createConnection(final ConnectionId id, final ServerOptions options) {
-        log.debug("Create new server - id: {}", id);
-
         try {
             return new ServerConnectionMultiplexor(new ServerInstance(id.getHost(), id.getPort(), options));
         } catch (final UnknownHostException e) {
diff --git a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerConsumer.java b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerConsumer.java
index be11ca3..2444361 100644
--- a/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerConsumer.java
+++ b/components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/server/ServerConsumer.java
@@ -26,9 +26,13 @@ import org.apache.camel.component.iec60870.ObjectAddress;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.camel.support.DefaultMessage;
 import org.eclipse.neoscada.protocol.iec60870.server.data.model.WriteModel.Request;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ServerConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(ServerConsumer.class);
+
     private final ServerInstance server;
     private final ServerEndpoint endpoint;
 
@@ -73,7 +77,7 @@ public class ServerConsumer extends DefaultConsumer {
 
             // we failed triggering the process
 
-            log.debug("Failed to process message", e);
+            LOG.debug("Failed to process message", e);
 
             // create a future
 
diff --git a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheContinuousQueryConsumer.java b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheContinuousQueryConsumer.java
index 4150e80..15c762e 100644
--- a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheContinuousQueryConsumer.java
+++ b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheContinuousQueryConsumer.java
@@ -31,12 +31,16 @@ import org.apache.camel.support.DefaultConsumer;
 import org.apache.ignite.IgniteCache;
 import org.apache.ignite.cache.query.ContinuousQuery;
 import org.apache.ignite.cache.query.QueryCursor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A consumer that generates {@link Exchange}s for items received from a continuous query.
  */
 public class IgniteCacheContinuousQueryConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IgniteCacheContinuousQueryConsumer.class);
+
     private IgniteCacheEndpoint endpoint;
 
     private IgniteCache<Object, Object> cache;
@@ -55,18 +59,18 @@ public class IgniteCacheContinuousQueryConsumer extends DefaultConsumer {
 
         launchContinuousQuery();
 
-        log.info("Started Ignite Cache Continuous Query consumer for cache {} with query: {}.", cache.getName(), endpoint.getQuery());
+        LOG.info("Started Ignite Cache Continuous Query consumer for cache {} with query: {}.", cache.getName(), endpoint.getQuery());
 
         maybeFireExistingQueryResults();
     }
 
     private void maybeFireExistingQueryResults() {
         if (!endpoint.isFireExistingQueryResults()) {
-            log.info(String.format("Skipping existing cache results for cache name = %s.", endpoint.getCacheName()));
+            LOG.info(String.format("Skipping existing cache results for cache name = %s.", endpoint.getCacheName()));
             return;
         }
 
-        log.info(String.format("Processing existing cache results for cache name = %s.", endpoint.getCacheName()));
+        LOG.info(String.format("Processing existing cache results for cache name = %s.", endpoint.getCacheName()));
 
         for (Entry<Object, Object> entry : cursor) {
             Exchange exchange = createExchange(entry.getValue());
@@ -94,8 +98,8 @@ public class IgniteCacheContinuousQueryConsumer extends DefaultConsumer {
         continuousQuery.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
             @Override
             public void onUpdated(Iterable<CacheEntryEvent<? extends Object, ? extends Object>> events) throws CacheEntryListenerException {
-                if (log.isTraceEnabled()) {
-                    log.info("Processing Continuous Query event(s): {}.", events);
+                if (LOG.isTraceEnabled()) {
+                    LOG.info("Processing Continuous Query event(s): {}.", events);
                 }
 
                 if (!endpoint.isOneExchangePerUpdate()) {
@@ -122,7 +126,7 @@ public class IgniteCacheContinuousQueryConsumer extends DefaultConsumer {
 
         cursor.close();
         
-        log.info("Stopped Ignite Cache Continuous Query consumer for cache {} with query: {}.", cache.getName(), endpoint.getQuery());
+        LOG.info("Stopped Ignite Cache Continuous Query consumer for cache {} with query: {}.", cache.getName(), endpoint.getQuery());
     }
 
     private void fireSingleExchange(CacheEntryEvent<? extends Object, ? extends Object> entry) {
diff --git a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsConsumer.java b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsConsumer.java
index d0177a8..82482ba 100644
--- a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsConsumer.java
+++ b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsConsumer.java
@@ -28,12 +28,16 @@ import org.apache.camel.support.DefaultConsumer;
 import org.apache.ignite.IgniteEvents;
 import org.apache.ignite.events.Event;
 import org.apache.ignite.lang.IgnitePredicate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Ignite Events consumer.
  */
 public class IgniteEventsConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IgniteEventsConsumer.class);
+
     private IgniteEventsEndpoint endpoint;
     private IgniteEvents events;
     private int[] eventTypes = new int[0];
@@ -47,8 +51,8 @@ public class IgniteEventsConsumer extends DefaultConsumer {
             Message in = exchange.getIn();
             in.setBody(event);
             try {
-                if (log.isTraceEnabled()) {
-                    log.trace("Processing Ignite Event: {}.", event);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Processing Ignite Event: {}.", event);
                 }
                 getAsyncProcessor().process(exchange, new AsyncCallback() {
                     @Override
@@ -57,7 +61,7 @@ public class IgniteEventsConsumer extends DefaultConsumer {
                     }
                 });
             } catch (Exception e) {
-                log.error(String.format("Exception while processing Ignite Event: %s.", event), e);
+                LOG.error(String.format("Exception while processing Ignite Event: %s.", event), e);
             }
             return true;
         }
@@ -82,7 +86,7 @@ public class IgniteEventsConsumer extends DefaultConsumer {
 
         events.localListen(predicate, eventTypes);
         
-        log.info("Started local Ignite Events consumer for events: {}.", Arrays.asList(eventTypes));
+        LOG.info("Started local Ignite Events consumer for events: {}.", Arrays.asList(eventTypes));
     }
 
     @Override
@@ -91,7 +95,7 @@ public class IgniteEventsConsumer extends DefaultConsumer {
 
         events.stopLocalListen(predicate, eventTypes);
         
-        log.info("Stopped local Ignite Events consumer for events: {}.", Arrays.asList(eventTypes));
+        LOG.info("Stopped local Ignite Events consumer for events: {}.", Arrays.asList(eventTypes));
     }
 
 }
diff --git a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsEndpoint.java b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsEndpoint.java
index b173b09..586af7c 100644
--- a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsEndpoint.java
+++ b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/events/IgniteEventsEndpoint.java
@@ -36,6 +36,8 @@ import org.apache.ignite.Ignite;
 import org.apache.ignite.IgniteEvents;
 import org.apache.ignite.cluster.ClusterGroup;
 import org.apache.ignite.events.EventType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Ignite Events endpoint is one of camel-ignite endpoints which allows you to
@@ -48,6 +50,8 @@ import org.apache.ignite.events.EventType;
     consumerOnly = true)
 public class IgniteEventsEndpoint extends AbstractIgniteEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IgniteEventsEndpoint.class);
+
     @UriPath
     private String endpointId;
 
@@ -73,7 +77,7 @@ public class IgniteEventsEndpoint extends AbstractIgniteEndpoint {
         IgniteEventsConsumer consumer = new IgniteEventsConsumer(this, processor, events);
         configureConsumer(consumer);
 
-        log.info("Created Ignite Events consumer for event types: {}.", events);
+        LOG.info("Created Ignite Events consumer for event types: {}.", events);
 
         return consumer;
     }
@@ -82,11 +86,11 @@ public class IgniteEventsEndpoint extends AbstractIgniteEndpoint {
         Ignite ignite = ignite();
         IgniteEvents events;
         if (clusterGroupExpression == null) {
-            log.info("Ignite Events endpoint for event types {} using no Cluster Group.", this.events);
+            LOG.info("Ignite Events endpoint for event types {} using no Cluster Group.", this.events);
             events = ignite.events();
         } else {
             ClusterGroup group = clusterGroupExpression.getClusterGroup(ignite);
-            log.info("Ignite Events endpoint for event types {} using Cluster Group: {}.", this.events, group);
+            LOG.info("Ignite Events endpoint for event types {} using Cluster Group: {}.", this.events, group);
             events = ignite.events(group);
         }
         return events;
diff --git a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/idgen/IgniteIdGenEndpoint.java b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/idgen/IgniteIdGenEndpoint.java
index d05dafb..d3e5b98 100644
--- a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/idgen/IgniteIdGenEndpoint.java
+++ b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/idgen/IgniteIdGenEndpoint.java
@@ -28,6 +28,8 @@ import org.apache.camel.spi.UriParam;
 import org.apache.camel.spi.UriPath;
 import org.apache.camel.util.ObjectHelper;
 import org.apache.ignite.IgniteAtomicSequence;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The Ignite ID Generator endpoint is one of camel-ignite endpoints which allows you to interact with
@@ -37,6 +39,8 @@ import org.apache.ignite.IgniteAtomicSequence;
 @UriEndpoint(firstVersion = "2.17.0", scheme = "ignite-idgen", title = "Ignite ID Generator", syntax = "ignite-idgen:name", label = "nosql,cache,compute", producerOnly = true)
 public class IgniteIdGenEndpoint extends AbstractIgniteEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IgniteIdGenEndpoint.class);
+
     @UriPath
     @Metadata(required = true)
     private String name;
@@ -63,7 +67,7 @@ public class IgniteIdGenEndpoint extends AbstractIgniteEndpoint {
 
         if (atomicSeq == null) {
             atomicSeq = ignite().atomicSequence(name, initialValue, true);
-            log.info("Created AtomicSequence of ID Generator with name {}.", name);
+            LOG.info("Created AtomicSequence of ID Generator with name {}.", name);
         }
 
         if (batchSize != null) {
diff --git a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/messaging/IgniteMessagingConsumer.java b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/messaging/IgniteMessagingConsumer.java
index 8065b0d..3fd767c 100644
--- a/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/messaging/IgniteMessagingConsumer.java
+++ b/components/camel-ignite/src/main/java/org/apache/camel/component/ignite/messaging/IgniteMessagingConsumer.java
@@ -26,12 +26,16 @@ import org.apache.camel.component.ignite.IgniteConstants;
 import org.apache.camel.support.DefaultConsumer;
 import org.apache.ignite.IgniteMessaging;
 import org.apache.ignite.lang.IgniteBiPredicate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Ignite Messaging consumer.
  */
 public class IgniteMessagingConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IgniteMessagingConsumer.class);
+
     private IgniteMessagingEndpoint endpoint;
     private IgniteMessaging messaging;
 
@@ -46,12 +50,12 @@ public class IgniteMessagingConsumer extends DefaultConsumer {
             in.setHeader(IgniteConstants.IGNITE_MESSAGING_TOPIC, endpoint.getTopic());
             in.setHeader(IgniteConstants.IGNITE_MESSAGING_UUID, uuid);
             try {
-                if (log.isTraceEnabled()) {
-                    log.trace("Processing Ignite message for subscription {} with payload {}.", uuid, payload);
+                if (LOG.isTraceEnabled()) {
+                    LOG.trace("Processing Ignite message for subscription {} with payload {}.", uuid, payload);
                 }
                 getProcessor().process(exchange);
             } catch (Exception e) {
-                log.error(String.format("Exception while processing Ignite Message from topic %s", endpoint.getTopic()), e);
+                LOG.error(String.format("Exception while processing Ignite Message from topic %s", endpoint.getTopic()), e);
             }
             return true;
         }
@@ -69,7 +73,7 @@ public class IgniteMessagingConsumer extends DefaultConsumer {
 
         messaging.localListen(endpoint.getTopic(), predicate);
         
-        log.info("Started Ignite Messaging consumer for topic {}.", endpoint.getTopic());
+        LOG.info("Started Ignite Messaging consumer for topic {}.", endpoint.getTopic());
     }
 
     @Override
@@ -78,7 +82,7 @@ public class IgniteMessagingConsumer extends DefaultConsumer {
 
         messaging.stopLocalListen(endpoint.getTopic(), predicate);
         
-        log.info("Stopped Ignite Messaging consumer for topic {}.", endpoint.getTopic());
+        LOG.info("Stopped Ignite Messaging consumer for topic {}.", endpoint.getTopic());
     }
 
 }
diff --git a/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanComponent.java b/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanComponent.java
index 2d404a0..91ada25 100644
--- a/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanComponent.java
+++ b/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanComponent.java
@@ -27,9 +27,14 @@ import org.apache.camel.support.DefaultComponent;
 import org.infinispan.commons.api.BasicCacheContainer;
 import org.infinispan.configuration.global.GlobalConfigurationBuilder;
 import org.infinispan.manager.DefaultCacheManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Component("infinispan")
 public class InfinispanComponent extends DefaultComponent {
+
+    private static final Logger LOG = LoggerFactory.getLogger(InfinispanComponent.class);
+
     @Metadata(description = "Default configuration")
     private InfinispanConfiguration configuration;
     @Metadata(description = "Default Cache container")
@@ -81,7 +86,7 @@ public class InfinispanComponent extends DefaultComponent {
                     new org.infinispan.configuration.cache.ConfigurationBuilder().build());
 
                 setCacheFromComponent = false;
-                log.debug("Default cacheContainer has been created");
+                LOG.debug("Default cacheContainer has been created");
             }
             conf.setCacheContainer(cacheContainer);
 
diff --git a/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanConsumer.java b/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanConsumer.java
index 5dc7066..c6d2588 100644
--- a/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanConsumer.java
+++ b/components/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/InfinispanConsumer.java
@@ -32,7 +32,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class InfinispanConsumer extends DefaultConsumer {
-    private static final transient Logger LOGGER = LoggerFactory.getLogger(InfinispanProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(InfinispanConsumer.class);
     private final InfinispanConfiguration configuration;
     private final InfinispanManager manager;
     private final String cacheName;
@@ -65,7 +65,7 @@ public class InfinispanConsumer extends DefaultConsumer {
         try {
             getProcessor().process(exchange);
         } catch (Exception e) {
-            LOGGER.error("Error processing event ", e);
+            LOG.error("Error processing event ", e);
         }
     }
 
diff --git a/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java b/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java
index d1f5c7c..cb69a11 100644
--- a/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java
+++ b/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java
@@ -26,6 +26,8 @@ import org.apache.camel.spi.UriPath;
 import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.DefaultEndpoint;
 import org.influxdb.InfluxDB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The influxdb component allows you to interact with <a href="https://influxdata.com/time-series-platform/influxdb/">InfluxDB</a>, a time series database.
@@ -33,6 +35,8 @@ import org.influxdb.InfluxDB;
 @UriEndpoint(firstVersion = "2.18.0", scheme = "influxdb", title = "InfluxDB", syntax = "influxdb:connectionBean", label = "database", producerOnly = true)
 public class InfluxDbEndpoint extends DefaultEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(InfluxDbEndpoint.class);
+
     private InfluxDB influxDB;
 
     @UriPath
@@ -66,7 +70,7 @@ public class InfluxDbEndpoint extends DefaultEndpoint {
     @Override
     protected void doStart() throws Exception {
         influxDB = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, InfluxDB.class);
-        log.debug("Resolved the connection with the name {} as {}", connectionBean, influxDB);
+        LOG.debug("Resolved the connection with the name {} as {}", connectionBean, influxDB);
         super.doStart();  
     }
     
diff --git a/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbProducer.java b/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbProducer.java
index 29ab60e..e2ab8e6 100644
--- a/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbProducer.java
+++ b/components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbProducer.java
@@ -27,6 +27,8 @@ import org.influxdb.dto.Point;
 import org.influxdb.dto.Pong;
 import org.influxdb.dto.Query;
 import org.influxdb.dto.QueryResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Producer for the InfluxDB components
@@ -34,6 +36,8 @@ import org.influxdb.dto.QueryResult;
  */
 public class InfluxDbProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(InfluxDbProducer.class);
+
     InfluxDbEndpoint endpoint;
     InfluxDB connection;
 
@@ -79,10 +83,10 @@ public class InfluxDbProducer extends DefaultProducer {
             Point p = exchange.getIn().getMandatoryBody(Point.class);
 
             try {
-                log.debug("Writing point {}", p.lineProtocol());
+                LOG.debug("Writing point {}", p.lineProtocol());
                 
                 if (!connection.databaseExists(dataBaseName)) {
-                    log.debug("Database {} doesn't exist. Creating it...", dataBaseName);
+                    LOG.debug("Database {} doesn't exist. Creating it...", dataBaseName);
                     connection.createDatabase(dataBaseName);
                 }
                 connection.write(dataBaseName, retentionPolicy, p);
@@ -93,7 +97,7 @@ public class InfluxDbProducer extends DefaultProducer {
             BatchPoints batchPoints = exchange.getIn().getMandatoryBody(BatchPoints.class);
 
             try {
-                log.debug("Writing BatchPoints {}", batchPoints.lineProtocol());
+                LOG.debug("Writing BatchPoints {}", batchPoints.lineProtocol());
 
                 connection.write(batchPoints);
             } catch (Exception ex) {
diff --git a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcComponent.java b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcComponent.java
index 47e2be9..9376297 100644
--- a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcComponent.java
+++ b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcComponent.java
@@ -28,6 +28,8 @@ import org.apache.camel.support.jsse.SSLContextParameters;
 import org.schwering.irc.lib.IRCConnection;
 import org.schwering.irc.lib.IRCEventListener;
 import org.schwering.irc.lib.ssl.SSLIRCConnection;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Defines the <a href="http://camel.apache.org/irc.html">IRC Component</a>
@@ -35,6 +37,8 @@ import org.schwering.irc.lib.ssl.SSLIRCConnection;
 @Component("irc,ircs")
 public class IrcComponent extends DefaultComponent implements SSLContextParametersAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IrcComponent.class);
+
     private final transient Map<String, IRCConnection> connectionCache = new HashMap<>();
 
     @Metadata(label = "security", defaultValue = "false")
@@ -57,8 +61,8 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
     public synchronized IRCConnection getIRCConnection(IrcConfiguration configuration) {
         final IRCConnection connection;
         if (connectionCache.containsKey(configuration.getCacheKey())) {
-            if (log.isDebugEnabled()) {
-                log.debug("Returning Cached Connection to {}:{}", configuration.getHostname(), configuration.getNickname());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Returning Cached Connection to {}:{}", configuration.getHostname(), configuration.getNickname());
             }
             connection = connectionCache.get(configuration.getCacheKey());
         } else {
@@ -74,8 +78,8 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
 
         if (configuration.getUsingSSL()) {
 
-            if (log.isDebugEnabled()) {
-                log.debug("Creating SSL Connection to {} destination(s): {} nick: {} user: {}",
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Creating SSL Connection to {} destination(s): {} nick: {} user: {}",
                     new Object[]{configuration.getHostname(), configuration.getSpaceSeparatedChannelNames(), configuration.getNickname(), configuration.getUsername()});
             }
 
@@ -96,8 +100,8 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
                 conn = sconn;
             }
         } else {
-            if (log.isDebugEnabled()) {
-                log.debug("Creating Connection to {} destination(s): {} nick: {} user: {}",
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Creating Connection to {} destination(s): {} nick: {} user: {}",
                         new Object[]{configuration.getHostname(), configuration.getSpaceSeparatedChannelNames(), configuration.getNickname(), configuration.getUsername()});
             }
 
@@ -108,8 +112,8 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
         conn.setColors(configuration.isColors());
         conn.setPong(true);
 
-        if (log.isDebugEnabled()) {
-            log.debug("Adding IRC event logging listener");
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Adding IRC event logging listener");
             ircLogger = createIrcLogger(configuration.getHostname());
             conn.addIRCEventListener(ircLogger);
         }
@@ -127,7 +131,7 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
             connection.doQuit();
             connection.close();
         } catch (Exception e) {
-            log.warn("Error during closing connection.", e);
+            LOG.warn("Error during closing connection.", e);
         }
     }
 
@@ -143,7 +147,7 @@ public class IrcComponent extends DefaultComponent implements SSLContextParamete
     }
 
     protected IRCEventListener createIrcLogger(String hostname) {
-        return new IrcLogger(log, hostname);
+        return new IrcLogger(LOG, hostname);
     }
 
     @Override
diff --git a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcConsumer.java b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcConsumer.java
index 4292219..596da1c 100644
--- a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcConsumer.java
+++ b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcConsumer.java
@@ -24,9 +24,13 @@ import org.schwering.irc.lib.IRCConnection;
 import org.schwering.irc.lib.IRCEventAdapter;
 import org.schwering.irc.lib.IRCModeParser;
 import org.schwering.irc.lib.IRCUser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class IrcConsumer extends DefaultConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IrcConsumer.class);
+
     private final IrcConfiguration configuration;
     private final IrcEndpoint endpoint;
     private final IRCConnection connection;
@@ -43,7 +47,7 @@ public class IrcConsumer extends DefaultConsumer {
     protected void doStop() throws Exception {
         if (connection != null) {
             for (IrcChannel channel : endpoint.getConfiguration().getChannelList()) {
-                log.debug("Parting: {}", channel);
+                LOG.debug("Parting: {}", channel);
                 connection.doPart(channel.getName());
             }
             connection.removeIRCEventListener(listener);
@@ -57,7 +61,7 @@ public class IrcConsumer extends DefaultConsumer {
         listener = getListener();
         connection.addIRCEventListener(listener);
 
-        log.debug("Sleeping for {} seconds before sending commands.", configuration.getCommandTimeout() / 1000);
+        LOG.debug("Sleeping for {} seconds before sending commands.", configuration.getCommandTimeout() / 1000);
         // sleep for a few seconds as the server sometimes takes a moment to fully connect, print banners, etc after connection established
         try {
             Thread.sleep(configuration.getCommandTimeout());
@@ -65,7 +69,7 @@ public class IrcConsumer extends DefaultConsumer {
             // ignore
         }
         if (ObjectHelper.isNotEmpty(configuration.getNickPassword())) {
-            log.debug("Identifying and enforcing nick with NickServ.");
+            LOG.debug("Identifying and enforcing nick with NickServ.");
             // Identify nick and enforce, https://meta.wikimedia.org/wiki/IRC/Instructions#Register_your_nickname.2C_identify.2C_and_enforce
             connection.doPrivmsg("nickserv", "identify " + configuration.getNickPassword());
             connection.doPrivmsg("nickserv", "set enforce on");
diff --git a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcEndpoint.java b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcEndpoint.java
index 3b2c860f..302e6c4 100644
--- a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcEndpoint.java
+++ b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcEndpoint.java
@@ -28,6 +28,8 @@ import org.schwering.irc.lib.IRCConnection;
 import org.schwering.irc.lib.IRCConstants;
 import org.schwering.irc.lib.IRCModeParser;
 import org.schwering.irc.lib.IRCUser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The irc component implements an <a href="https://en.wikipedia.org/wiki/Internet_Relay_Chat">IRC</a> (Internet Relay Chat) transport.
@@ -41,6 +43,8 @@ import org.schwering.irc.lib.IRCUser;
     label = "chat")
 public class IrcEndpoint extends DefaultEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IrcEndpoint.class);
+
     @UriParam
     private IrcConfiguration configuration;
     private IrcBinding binding;
@@ -174,9 +178,9 @@ public class IrcEndpoint extends DefaultEndpoint {
 
         // hackish but working approach to prevent an endless loop. Abort after 4 nick attempts.
         if (nick.endsWith("----")) {
-            log.error("Unable to set nick: {} disconnecting", nick);
+            LOG.error("Unable to set nick: {} disconnecting", nick);
         } else {
-            log.warn("Unable to set nick: " + nick + " Retrying with " + nick + "-");
+            LOG.warn("Unable to set nick: " + nick + " Retrying with " + nick + "-");
             connection.doNick(nick);
             // if the nick failure was doing startup channels weren't joined. So join
             // the channels now. It's a no-op if the channels are already joined.
@@ -205,13 +209,13 @@ public class IrcEndpoint extends DefaultEndpoint {
         String key = channel.getKey();
 
         if (ObjectHelper.isNotEmpty(key)) {
-            if (log.isDebugEnabled()) {
-                log.debug("Joining: {} using {} with secret key", channel, connection.getClass().getName());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Joining: {} using {} with secret key", channel, connection.getClass().getName());
             }
             connection.doJoin(chn, key);
         } else {
-            if (log.isDebugEnabled()) {
-                log.debug("Joining: {} using {}", channel, connection.getClass().getName());
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Joining: {} using {}", channel, connection.getClass().getName());
             }
             connection.doJoin(chn);
         }
diff --git a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcProducer.java b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcProducer.java
index fd2f1bb..c0858e4 100644
--- a/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcProducer.java
+++ b/components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcProducer.java
@@ -22,12 +22,16 @@ import org.apache.camel.support.DefaultProducer;
 import org.schwering.irc.lib.IRCConnection;
 import org.schwering.irc.lib.IRCEventAdapter;
 import org.schwering.irc.lib.IRCUser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class IrcProducer extends DefaultProducer {
 
     public static final String[] COMMANDS = new String[] {"AWAY", "INVITE", "ISON", "JOIN", "KICK", "LIST", "NAMES",
         "PRIVMSG", "MODE", "NICK", "NOTICE", "PART", "PONG", "QUIT", "TOPIC", "WHO", "WHOIS", "WHOWAS", "USERHOST"};
 
+    private static final Logger LOG = LoggerFactory.getLogger(IrcProducer.class);
+
     private final IrcConfiguration configuration;
     private IRCConnection connection;
     private IrcEndpoint endpoint;
@@ -51,14 +55,14 @@ public class IrcProducer extends DefaultProducer {
 
         if (msg != null) {
             if (isMessageACommand(msg)) {
-                log.debug("Sending command: {}", msg);
+                LOG.debug("Sending command: {}", msg);
                 connection.send(msg);
             } else if (sendTo != null) {
-                log.debug("Sending to: {} message: {}", sendTo, msg);
+                LOG.debug("Sending to: {} message: {}", sendTo, msg);
                 connection.doPrivmsg(sendTo, msg);
             } else {
                 for (IrcChannel channel : endpoint.getConfiguration().getChannelList()) {
-                    log.debug("Sending to: {} message: {}", channel, msg);
+                    LOG.debug("Sending to: {} message: {}", channel, msg);
                     connection.doPrivmsg(channel.getName(), msg);
                 }
             }
@@ -70,7 +74,7 @@ public class IrcProducer extends DefaultProducer {
         super.doStart();
         listener = getListener();
         connection.addIRCEventListener(listener);
-        log.debug("Sleeping for {} seconds before sending commands.", configuration.getCommandTimeout() / 1000);
+        LOG.debug("Sleeping for {} seconds before sending commands.", configuration.getCommandTimeout() / 1000);
         // sleep for a few seconds as the server sometimes takes a moment to fully connect, print banners, etc after connection established
         try {
             Thread.sleep(configuration.getCommandTimeout());
@@ -84,7 +88,7 @@ public class IrcProducer extends DefaultProducer {
     protected void doStop() throws Exception {
         if (connection != null) {
             for (IrcChannel channel : endpoint.getConfiguration().getChannelList()) {
-                log.debug("Parting: {}", channel);
+                LOG.debug("Parting: {}", channel);
                 connection.doPart(channel.getName());
             }
             connection.removeIRCEventListener(listener);
diff --git a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQConsumer.java b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQConsumer.java
index 414edaf..89350bb 100644
--- a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQConsumer.java
+++ b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQConsumer.java
@@ -31,12 +31,16 @@ import org.apache.camel.support.ExchangeHelper;
 import org.apache.camel.support.ScheduledBatchPollingConsumer;
 import org.apache.camel.util.CastUtils;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The IronMQ consumer.
  */
 public class IronMQConsumer extends ScheduledBatchPollingConsumer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IronMQConsumer.class);
+
     private final io.iron.ironmq.Queue ironQueue;
     
     public IronMQConsumer(Endpoint endpoint, Processor processor, io.iron.ironmq.Queue ironQueue) {
@@ -51,15 +55,15 @@ public class IronMQConsumer extends ScheduledBatchPollingConsumer {
         pendingExchanges = 0;
         try {
             Messages messages = null;
-            log.trace("Receiving messages with request [messagePerPoll {}, timeout {}]...", getMaxMessagesPerPoll(), getEndpoint().getConfiguration().getTimeout());
+            LOG.trace("Receiving messages with request [messagePerPoll {}, timeout {}]...", getMaxMessagesPerPoll(), getEndpoint().getConfiguration().getTimeout());
             messages = this.ironQueue.reserve(getMaxMessagesPerPoll(), getEndpoint().getConfiguration().getTimeout(), getEndpoint().getConfiguration().getWait());
-            log.trace("Received {} messages", messages.getSize());
+            LOG.trace("Received {} messages", messages.getSize());
 
             Queue<Exchange> exchanges = createExchanges(messages.getMessages());
             int noProcessed = processBatch(CastUtils.cast(exchanges));
             // delete all processed messages in one batch;
             if (getEndpoint().getConfiguration().isBatchDelete()) {
-                log.trace("Batch deleting {} messages", messages.getSize());
+                LOG.trace("Batch deleting {} messages", messages.getSize());
                 this.ironQueue.deleteMessages(messages);
             }
             return noProcessed;
@@ -69,7 +73,7 @@ public class IronMQConsumer extends ScheduledBatchPollingConsumer {
     }
 
     protected Queue<Exchange> createExchanges(Message[] messages) {
-        log.trace("Received {} messages in this poll", messages.length);
+        LOG.trace("Received {} messages in this poll", messages.length);
 
         Queue<Exchange> answer = new LinkedList<>();
         for (Message message : messages) {
@@ -116,7 +120,7 @@ public class IronMQConsumer extends ScheduledBatchPollingConsumer {
                 });
             }
 
-            log.trace("Processing exchange [{}]...", exchange);
+            LOG.trace("Processing exchange [{}]...", exchange);
 
             getProcessor().process(exchange);
         }
@@ -131,9 +135,9 @@ public class IronMQConsumer extends ScheduledBatchPollingConsumer {
      */
     protected void processCommit(Exchange exchange, String messageid, String reservationId) {
         try {
-            log.trace("Deleting message with messageId {} and reservationId {}...", messageid, reservationId);
+            LOG.trace("Deleting message with messageId {} and reservationId {}...", messageid, reservationId);
             this.ironQueue.deleteMessage(messageid, reservationId);
-            log.trace("Message deleted");
+            LOG.trace("Message deleted");
         } catch (Exception e) {
             getExceptionHandler().handleException("Error occurred during delete of message. This exception is ignored.", exchange, e);
         }
@@ -147,9 +151,9 @@ public class IronMQConsumer extends ScheduledBatchPollingConsumer {
     protected void processRollback(Exchange exchange) {
         Exception cause = exchange.getException();
         if (cause != null) {
-            log.warn("Exchange failed, so rolling back message status: {}", exchange, cause);
+            LOG.warn("Exchange failed, so rolling back message status: {}", exchange, cause);
         } else {
-            log.warn("Exchange failed, so rolling back message status: {}", exchange);
+            LOG.warn("Exchange failed, so rolling back message status: {}", exchange);
         }
     }
 
diff --git a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQEndpoint.java b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQEndpoint.java
index b413251..56fdd2f 100644
--- a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQEndpoint.java
+++ b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQEndpoint.java
@@ -30,6 +30,8 @@ import org.apache.camel.spi.UriEndpoint;
 import org.apache.camel.spi.UriParam;
 import org.apache.camel.support.DefaultScheduledPollConsumerScheduler;
 import org.apache.camel.support.ScheduledPollEndpoint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The ironmq provides integration with <a href="https://www.iron.io/">IronMQ</a> an elastic and durable hosted message queue as a service.
@@ -37,6 +39,8 @@ import org.apache.camel.support.ScheduledPollEndpoint;
 @UriEndpoint(firstVersion = "2.17.0", scheme = "ironmq", syntax = "ironmq:queueName", title = "IronMQ", label = "cloud,messaging")
 public class IronMQEndpoint extends ScheduledPollEndpoint {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IronMQEndpoint.class);
+
     @UriParam
     private IronMQConfiguration configuration;
 
@@ -117,7 +121,7 @@ public class IronMQEndpoint extends ScheduledPollEndpoint {
             cloud = new Cloud(configuration.getIronMQCloud());
         } catch (MalformedURLException e) {
             cloud = Cloud.ironAWSUSEast;
-            log.warn("Unable to parse ironMQCloud {} will use {}", configuration.getIronMQCloud(), cloud.getHost());
+            LOG.warn("Unable to parse ironMQCloud {} will use {}", configuration.getIronMQCloud(), cloud.getHost());
         }
         client = new Client(configuration.getProjectId(), configuration.getToken(), cloud);
         return client;
diff --git a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQProducer.java b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQProducer.java
index 61e61e1..d1d9d90 100644
--- a/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQProducer.java
+++ b/components/camel-ironmq/src/main/java/org/apache/camel/component/ironmq/IronMQProducer.java
@@ -21,12 +21,16 @@ import org.apache.camel.Exchange;
 import org.apache.camel.InvalidPayloadException;
 import org.apache.camel.Message;
 import org.apache.camel.support.DefaultProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The IronMQ producer.
  */
 public class IronMQProducer extends DefaultProducer {
 
+    private static final Logger LOG = LoggerFactory.getLogger(IronMQProducer.class);
+
     private final Queue ironQueue;
     
     public IronMQProducer(IronMQEndpoint endpoint, Queue ironQueue) {
@@ -52,8 +56,8 @@ public class IronMQProducer extends DefaultProducer {
             } else {
                 throw new InvalidPayloadException(exchange, String.class);
             }
-            log.trace("Send request [{}] from exchange [{}]...", body, exchange);
-            log.trace("Received messageId [{}]", messageId);
+            LOG.trace("Send request [{}] from exchange [{}]...", body, exchange);
+            LOG.trace("Received messageId [{}]", messageId);
             Message message = getMessageForResponse(exchange);
             message.setHeader(IronMQConstants.MESSAGE_ID, messageId);
         }
diff --git a/components/camel-jackson/src/main/java/org/apache/camel/component/jackson/JacksonDataFormat.java b/components/camel-jackson/src/main/java/org/apache/camel/component/jackson/JacksonDataFormat.java
index 2c50391..505ef69 100644
--- a/components/camel-jackson/src/main/java/org/apache/camel/component/jackson/JacksonDataFormat.java
+++ b/components/camel-jackson/src/main/java/org/apache/camel/component/jackson/JacksonDataFormat.java
@@ -44,6 +44,8 @@ import org.apache.camel.spi.annotations.Dataformat;
 import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.ObjectHelper;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A <a href="http://camel.apache.org/data-format.html">data format</a>
@@ -53,6 +55,8 @@ import org.apache.camel.support.service.ServiceSupport;
 @Dataformat("json-jackson")
 public class JacksonDataFormat extends ServiceSupport implements DataFormat, DataFormatName, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(JacksonDataFormat.class);
+
     private CamelContext camelContext;
     private ObjectMapper objectMapper;
     private boolean useDefaultObjectMapper = true;
@@ -498,18 +502,18 @@ public class JacksonDataFormat extends ServiceSupport implements DataFormat, Dat
                     Set<ObjectMapper> set = camelContext.getRegistry().findByType(ObjectMapper.class);
                     if (set.size() == 1) {
                         objectMapper = set.iterator().next();
-                        log.info("Found single ObjectMapper in Registry to use: {}", objectMapper);
+                        LOG.info("Found single ObjectMapper in Registry to use: {}", objectMapper);
                         objectMapperFoundRegistry = true;
                     } else if (set.size() > 1) {
-                        log.debug("Found {} ObjectMapper in Registry cannot use as default as there are more than one instance.", set.size());
+                        LOG.debug("Found {} ObjectMapper in Registry cannot use as default as there are more than one instance.", set.size());
                     }
                 } else {
-                    log.warn("The option autoDiscoverObjectMapper is set to false, Camel won't search in the registry");
+                    LOG.warn("The option autoDiscoverObjectMapper is set to false, Camel won't search in the registry");
                 }
             }
             if (objectMapper == null) {
                 objectMapper = new ObjectMapper();
-                log.debug("Creating new ObjectMapper to use: {}", objectMapper);
+                LOG.debug("Creating new ObjectMapper to use: {}", objectMapper);
             }
         }
 
@@ -517,7 +521,7 @@ public class JacksonDataFormat extends ServiceSupport implements DataFormat, Dat
             if (enableJaxbAnnotationModule) {
                 // Enables JAXB processing
                 JaxbAnnotationModule module = new JaxbAnnotationModule();
-                log.debug("Registering JaxbAnnotationModule: {}", module);
+                LOG.debug("Registering JaxbAnnotationModule: {}", module);
                 objectMapper.registerModule(module);
             }
 
@@ -583,7 +587,7 @@ public class JacksonDataFormat extends ServiceSupport implements DataFormat, Dat
 
             if (modules != null) {
                 for (Module module : modules) {
-                    log.debug("Registering module: {}", module);
+                    LOG.debug("Registering module: {}", module);
                     objectMapper.registerModules(module);
                 }
             }
@@ -593,7 +597,7 @@ public class JacksonDataFormat extends ServiceSupport implements DataFormat, Dat
                     String name = o.toString();
                     Class<Module> clazz = camelContext.getClassResolver().resolveMandatoryClass(name, Module.class);
                     Module module = camelContext.getInjector().newInstance(clazz);
-                    log.debug("Registering module: {} -> {}", name, module);
+                    LOG.debug("Registering module: {} -> {}", name, module);
                     objectMapper.registerModule(module);
                 }
             }
@@ -605,16 +609,16 @@ public class JacksonDataFormat extends ServiceSupport implements DataFormat, Dat
                         name = name.substring(1);
                     }
                     Module module = CamelContextHelper.mandatoryLookup(camelContext, name, Module.class);
-                    log.debug("Registering module: {} -> {}", name, module);
+                    LOG.debug("Registering module: {} -> {}", name, module);
                     objectMapper.registerModule(module);
                 }
             }
             if (org.apache.camel.util.ObjectHelper.isNotEmpty(timezone)) {
-                log.debug("Setting timezone to Object Mapper: {}", timezone);
+                LOG.debug("Setting timezone to Object Mapper: {}", timezone);
                 objectMapper.setTimeZone(timezone);
             }
         } else {
-            log.warn("The objectMapper was already found in the registry, no customizations will be applied");
+            LOG.warn("The objectMapper was already found in the registry, no customizations will be applied");
         }
     }
 
diff --git a/components/camel-jacksonxml/src/main/java/org/apache/camel/component/jacksonxml/JacksonXMLDataFormat.java b/components/camel-jacksonxml/src/main/java/org/apache/camel/component/jacksonxml/JacksonXMLDataFormat.java
index c9964c0..8f8286e 100644
--- a/components/camel-jacksonxml/src/main/java/org/apache/camel/component/jacksonxml/JacksonXMLDataFormat.java
+++ b/components/camel-jacksonxml/src/main/java/org/apache/camel/component/jacksonxml/JacksonXMLDataFormat.java
@@ -43,6 +43,8 @@ import org.apache.camel.spi.annotations.Dataformat;
 import org.apache.camel.support.CamelContextHelper;
 import org.apache.camel.support.ObjectHelper;
 import org.apache.camel.support.service.ServiceSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A <a href="http://camel.apache.org/data-format.html">data format</a>
@@ -52,6 +54,8 @@ import org.apache.camel.support.service.ServiceSupport;
 @Dataformat("jacksonxml")
 public class JacksonXMLDataFormat extends ServiceSupport implements DataFormat, DataFormatName, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(JacksonXMLDataFormat.class);
+
     private CamelContext camelContext;
     private XmlMapper xmlMapper;
     private Class<? extends Collection> collectionType;
@@ -472,7 +476,7 @@ public class JacksonXMLDataFormat extends ServiceSupport implements DataFormat,
         if (enableJaxbAnnotationModule) {
             // Enables JAXB processing
             JaxbAnnotationModule module = new JaxbAnnotationModule();
-            log.info("Registering module: {}", module);
+            LOG.info("Registering module: {}", module);
             xmlMapper.registerModule(module);
         }
 
@@ -538,7 +542,7 @@ public class JacksonXMLDataFormat extends ServiceSupport implements DataFormat,
 
         if (modules != null) {
             for (Module module : modules) {
-                log.info("Registering module: {}", module);
+                LOG.info("Registering module: {}", module);
                 xmlMapper.registerModules(module);
             }
         }
@@ -548,7 +552,7 @@ public class JacksonXMLDataFormat extends ServiceSupport implements DataFormat,
                 String name = o.toString();
                 Class<Module> clazz = camelContext.getClassResolver().resolveMandatoryClass(name, Module.class);
                 Module module = camelContext.getInjector().newInstance(clazz);
-                log.info("Registering module: {} -> {}", name, module);
+                LOG.info("Registering module: {} -> {}", name, module);
                 xmlMapper.registerModule(module);
             }
         }
@@ -560,12 +564,12 @@ public class JacksonXMLDataFormat extends ServiceSupport implements DataFormat,
                     name = name.substring(1);
                 }
                 Module module = CamelContextHelper.mandatoryLookup(camelContext, name, Module.class);
-                log.info("Registering module: {} -> {}", name, module);
+                LOG.info("Registering module: {} -> {}", name, module);
                 xmlMapper.registerModule(module);
             }
         }
         if (org.apache.camel.util.ObjectHelper.isNotEmpty(timezone)) {
-            log.debug("Setting timezone to XML Mapper: {}", timezone);
+            LOG.debug("Setting timezone to XML Mapper: {}", timezone);
             xmlMapper.setTimeZone(timezone);
         }
     }
diff --git a/components/camel-jaxb/src/main/java/org/apache/camel/converter/jaxb/JaxbDataFormat.java b/components/camel-jaxb/src/main/java/org/apache/camel/converter/jaxb/JaxbDataFormat.java
index 1a0a0ea..de1c442 100644
--- a/components/camel-jaxb/src/main/java/org/apache/camel/converter/jaxb/JaxbDataFormat.java
+++ b/components/camel-jaxb/src/main/java/org/apache/camel/converter/jaxb/JaxbDataFormat.java
@@ -48,6 +48,8 @@ import javax.xml.transform.stream.StreamSource;
 import javax.xml.validation.Schema;
 import javax.xml.validation.SchemaFactory;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
 import org.apache.camel.CamelContext;
@@ -72,6 +74,8 @@ import org.apache.camel.util.ObjectHelper;
 @Dataformat("jaxb")
 public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFormatName, CamelContextAware {
 
+    private static final Logger LOG = LoggerFactory.getLogger(JaxbDataFormat.class);
+
     private static final BlockingQueue<SchemaFactory> SCHEMA_FACTORY_POOL = new LinkedBlockingQueue<>();
 
     private SchemaFactory schemaFactory;
@@ -160,8 +164,8 @@ public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFo
             }
             if (customProperties != null) {
                 for (Entry<String, Object> property : customProperties.entrySet()) {
-                    if (log.isDebugEnabled()) {
-                        log.debug("Using JAXB Provider Property {}={}", property.getKey(), property.getValue());
+                    if (LOG.isDebugEnabled()) {
+                        LOG.debug("Using JAXB Provider Property {}={}", property.getKey(), property.getValue());
                     }
                     marshaller.setProperty(property.getKey(), property.getValue());
                 }
@@ -247,7 +251,7 @@ public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFo
                         throw e;
                     }
                     
-                    log.debug("Unable to create JAXBElement object for type " + element.getClass() + " due to " + e.getMessage(), e);
+                    LOG.debug("Unable to create JAXBElement object for type " + element.getClass() + " due to " + e.getMessage(), e);
                 }
             }
         }
@@ -255,8 +259,8 @@ public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFo
         // cannot marshal
         if (!mustBeJAXBElement) {
             // write the graph as is to the output stream
-            if (log.isDebugEnabled()) {
-                log.debug("Attempt to marshalling non JAXBElement with type {} as InputStream", ObjectHelper.classCanonicalName(graph));
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Attempt to marshalling non JAXBElement with type {} as InputStream", ObjectHelper.classCanonicalName(graph));
             }
             InputStream is = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, exchange, graph);
             IOHelper.copyAndCloseInput(is, stream);
@@ -524,7 +528,7 @@ public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFo
             cachedSchema = createSchema(getSources());
         }
 
-        log.debug("JaxbDataFormat [prettyPrint={}, objectFactory={}]", prettyPrint, objectFactory);
+        LOG.debug("JaxbDataFormat [prettyPrint={}, objectFactory={}]", prettyPrint, objectFactory);
     }
 
     @Override
@@ -540,14 +544,14 @@ public class JaxbDataFormat extends ServiceSupport implements DataFormat, DataFo
             // load the class which has been JAXB annotated
             ClassLoader cl = camelContext.getApplicationContextClassLoader();
             if (cl != null) {
-                log.debug("Creating JAXBContext with contextPath: " + contextPath + " and ApplicationContextClassLoader: " + cl);
+                LOG.debug("Creating JAXBContext with contextPath: " + contextPath + " and ApplicationContextClassLoader: " + cl);
                 return JAXBContext.newInstance(contextPath, cl);
             } else {
-                log.debug("Creating JAXBContext with contextPath: {}", contextPath);
+                LOG.debug("Creating JAXBContext with contextPath: {}", contextPath);
                 return JAXBContext.newInstance(contextPath);
             }
         } else {
-            log.debug("Creating JAXBContext");
+            LOG.debug("Creating JAXBContext");
             return JAXBContext.newInstance();
         }
     }
diff --git a/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheConsumer.java b/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheConsumer.java
index 96ffbbb..3921358 100644
--- a/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheConsumer.java
+++ b/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheConsumer.java
@@ -35,7 +35,7 @@ import org.slf4j.LoggerFactory;
  * The JCache consumer.
  */
 public class JCacheConsumer extends DefaultConsumer {
-    private static final Logger LOGGER = LoggerFactory.getLogger(JCacheConsumer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(JCacheConsumer.class);
 
     private CacheEntryListenerConfiguration<Object, Object> entryListenerConfiguration;
 
@@ -96,7 +96,7 @@ public class JCacheConsumer extends DefaultConsumer {
                                 try {
                                     getProcessor().process(exchange);
                                 } catch (Exception e) {
-                                    LOGGER.error("Error processing event ", e);
+                                    LOG.error("Error processing event ", e);
                                 }
                             }
                         }
diff --git a/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/processor/aggregate/JCacheAggregationRepository.java b/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/processor/aggregate/JCacheAggregationRepository.java
index 7fb8af9..2896ea2 100644
--- a/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/processor/aggregate/JCacheAggregationRepository.java
+++ b/components/camel-jcache/src/main/java/org/apache/camel/component/jcache/processor/aggregate/JCacheAggregationRepository.java
@@ -33,9 +33,13 @@ import org.apache.camel.support.DefaultExchange;
 import org.apache.camel.support.DefaultExchangeHolder;
 import org.apache.camel.support.service.ServiceSupport;
 import org.apache.camel.util.ObjectHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class JCacheAggregationRepository extends ServiceSupport implements  OptimisticLockingAggregationRepository {
 
+    private static final Logger LOG = LoggerFactory.getLogger(JCacheAggregationRepository.class);
+
     private JCacheConfiguration configuration;
... 11483 lines suppressed ...