Posted to commits@camel.apache.org by da...@apache.org on 2022/03/03 09:39:22 UTC

[camel] 01/01: CAMEL-17727: camel-kafka - Add readiness health check for kafka consumer

This is an automated email from the ASF dual-hosted git repository.

davsclaus pushed a commit to branch kafka-health
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 9b795174d054b628a252971f3509ccda6709b2d7
Author: Claus Ibsen <cl...@gmail.com>
AuthorDate: Thu Mar 3 10:38:32 2022 +0100

    CAMEL-17727: camel-kafka - Add readiness health check for kafka consumer
---
 components/camel-kafka/pom.xml                     |   4 +
 .../camel/component/kafka/KafkaConsumer.java       |  20 ++-
 .../component/kafka/KafkaConsumerHealthCheck.java  |  73 ++++++++++
 .../integration/KafkaConsumerHealthCheckIT.java    | 156 +++++++++++++++++++++
 4 files changed, 252 insertions(+), 1 deletion(-)
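
For context, the check added here is registered per Kafka consumer (id "camel:kafka-consumer-" + routeId) and reports readiness only, not liveness. A minimal sketch of how an application could query it, using the same HealthCheckHelper API that the integration test below exercises (the class name ReadinessProbe is illustrative, not part of this commit):

    import java.util.Collection;

    import org.apache.camel.CamelContext;
    import org.apache.camel.health.HealthCheck;
    import org.apache.camel.health.HealthCheckHelper;

    public class ReadinessProbe {

        // Returns true when every readiness check in the context,
        // including the new Kafka consumer check, reports UP.
        public static boolean isReady(CamelContext context) {
            Collection<HealthCheck.Result> results = HealthCheckHelper.invokeReadiness(context);
            return results.stream().allMatch(r -> r.getState().equals(HealthCheck.State.UP));
        }
    }
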

diff --git a/components/camel-kafka/pom.xml b/components/camel-kafka/pom.xml
index 115c6b7..ab10fab 100644
--- a/components/camel-kafka/pom.xml
+++ b/components/camel-kafka/pom.xml
@@ -38,6 +38,10 @@
             <groupId>org.apache.camel</groupId>
             <artifactId>camel-support</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-health</artifactId>
+        </dependency>
 
         <!-- kafka java client -->
         <dependency>
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
index 692a505..185d8ac 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
@@ -24,9 +24,12 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import org.apache.camel.CamelContextAware;
 import org.apache.camel.Processor;
 import org.apache.camel.ResumeAware;
 import org.apache.camel.component.kafka.consumer.support.KafkaConsumerResumeStrategy;
+import org.apache.camel.health.HealthCheck;
+import org.apache.camel.health.HealthCheckAware;
 import org.apache.camel.spi.StateRepository;
 import org.apache.camel.support.BridgeExceptionHandlerToErrorHandler;
 import org.apache.camel.support.DefaultConsumer;
@@ -37,7 +40,7 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class KafkaConsumer extends DefaultConsumer implements ResumeAware<KafkaConsumerResumeStrategy> {
+public class KafkaConsumer extends DefaultConsumer implements ResumeAware<KafkaConsumerResumeStrategy>, HealthCheckAware {
 
     private static final Logger LOG = LoggerFactory.getLogger(KafkaConsumer.class);
 
@@ -67,6 +70,17 @@ public class KafkaConsumer extends DefaultConsumer implements ResumeAware<KafkaC
     @Override
     protected void doBuild() throws Exception {
         super.doBuild();
+
+        // build health-check
+        String rid = getRouteId();
+        if (rid == null) {
+            // not from a route so need some other uuid
+            rid = endpoint.getCamelContext().getUuidGenerator().generateUuid();
+        }
+        HealthCheck hc = new KafkaConsumerHealthCheck(this, rid);
+        CamelContextAware.trySetCamelContext(hc, endpoint.getCamelContext());
+        setHealthCheck(hc);
+
         if (endpoint.getComponent().getPollExceptionStrategy() != null) {
             pollExceptionStrategy = endpoint.getComponent().getPollExceptionStrategy();
         } else {
@@ -101,6 +115,10 @@ public class KafkaConsumer extends DefaultConsumer implements ResumeAware<KafkaC
         return props;
     }
 
+    List<KafkaFetchRecords> getTasks() {
+        return tasks;
+    }
+
     @Override
     protected void doStart() throws Exception {
         LOG.info("Starting Kafka consumer on topic: {} with breakOnFirstError: {}", endpoint.getConfiguration().getTopic(),
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumerHealthCheck.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumerHealthCheck.java
new file mode 100644
index 0000000..1ffd581
--- /dev/null
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumerHealthCheck.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.kafka;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.camel.health.HealthCheckResultBuilder;
+import org.apache.camel.impl.health.AbstractHealthCheck;
+
+/**
+ * Kafka consumer readiness health-check
+ */
+public class KafkaConsumerHealthCheck extends AbstractHealthCheck {
+
+    private final KafkaConsumer kafkaConsumer;
+    private final String routeId;
+
+    public KafkaConsumerHealthCheck(KafkaConsumer kafkaConsumer, String routeId) {
+        super("camel", "kafka-consumer-" + routeId);
+        this.kafkaConsumer = kafkaConsumer;
+        this.routeId = routeId;
+    }
+
+    @Override
+    public boolean isLiveness() {
+        // this health check is only readiness
+        return false;
+    }
+
+    @Override
+    protected void doCall(HealthCheckResultBuilder builder, Map<String, Object> options) {
+        List<KafkaFetchRecords> tasks = kafkaConsumer.getTasks();
+        for (KafkaFetchRecords task : tasks) {
+            if (!task.isConnected()) {
+                builder.down();
+                builder.message("KafkaConsumer is not connected");
+
+                KafkaConfiguration cfg = kafkaConsumer.getEndpoint().getConfiguration();
+                if (cfg.getBrokers() != null) {
+                    builder.detail("bootstrap.servers", cfg.getBrokers());
+                }
+                if (cfg.getClientId() != null) {
+                    builder.detail("client.id", cfg.getClientId());
+                }
+                if (cfg.getGroupId() != null) {
+                    builder.detail("group.id", cfg.getGroupId());
+                }
+                if (routeId != null) {
+                    // camel route id
+                    builder.detail("route.id", routeId);
+                }
+                builder.detail("topic", cfg.getTopic());
+
+                return; // break on first DOWN
+            }
+        }
+    }
+}
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/KafkaConsumerHealthCheckIT.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/KafkaConsumerHealthCheckIT.java
new file mode 100644
index 0000000..f337b88
--- /dev/null
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/KafkaConsumerHealthCheckIT.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.kafka.integration;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.stream.StreamSupport;
+
+import org.apache.camel.BindToRegistry;
+import org.apache.camel.Endpoint;
+import org.apache.camel.EndpointInject;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.kafka.KafkaConstants;
+import org.apache.camel.component.kafka.MockConsumerInterceptor;
+import org.apache.camel.component.kafka.serde.DefaultKafkaHeaderDeserializer;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.health.HealthCheck;
+import org.apache.camel.health.HealthCheckHelper;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
+public class KafkaConsumerHealthCheckIT extends BaseEmbeddedKafkaTestSupport {
+    public static final String TOPIC = "test-health";
+
+    private static final Logger LOG = LoggerFactory.getLogger(KafkaConsumerHealthCheckIT.class);
+
+    @BindToRegistry("myHeaderDeserializer")
+    private MyKafkaHeaderDeserializer deserializer = new MyKafkaHeaderDeserializer();
+
+    @EndpointInject("kafka:" + TOPIC
+                    + "?groupId=group1&autoOffsetReset=earliest&keyDeserializer=org.apache.kafka.common.serialization.StringDeserializer&"
+                    + "valueDeserializer=org.apache.kafka.common.serialization.StringDeserializer"
+                    + "&autoCommitIntervalMs=1000&sessionTimeoutMs=30000&autoCommitEnable=true&interceptorClasses=org.apache.camel.component.kafka.MockConsumerInterceptor")
+    private Endpoint from;
+
+    @EndpointInject("mock:result")
+    private MockEndpoint to;
+
+    private org.apache.kafka.clients.producer.KafkaProducer<String, String> producer;
+
+    @BeforeEach
+    public void before() {
+        Properties props = getDefaultProperties();
+        producer = new org.apache.kafka.clients.producer.KafkaProducer<>(props);
+        MockConsumerInterceptor.recordsCaptured.clear();
+    }
+
+    @AfterEach
+    public void after() {
+        if (producer != null) {
+            producer.close();
+        }
+        // clean all test topics
+        kafkaAdminClient.deleteTopics(Collections.singletonList(TOPIC)).all();
+    }
+
+    @Override
+    protected RouteBuilder createRouteBuilder() throws Exception {
+        return new RouteBuilder() {
+
+            @Override
+            public void configure() {
+                from(from).process(exchange -> LOG.trace("Captured on the processor: {}", exchange.getMessage().getBody()))
+                        .routeId("test-health-it").to(to);
+            }
+        };
+    }
+
+    @Order(1)
+    @Test
+    public void kafkaConsumerHealthCheck() throws InterruptedException, IOException {
+        // health-check should be ready
+        Collection<HealthCheck.Result> res = HealthCheckHelper.invokeReadiness(context);
+        boolean up = res.stream().allMatch(r -> r.getState().equals(HealthCheck.State.UP));
+        Assertions.assertTrue(up, "readiness check");
+
+        String propagatedHeaderKey = "PropagatedCustomHeader";
+        byte[] propagatedHeaderValue = "propagated header value".getBytes();
+        String skippedHeaderKey = "CamelSkippedHeader";
+        to.expectedMessageCount(5);
+        to.expectedBodiesReceivedInAnyOrder("message-0", "message-1", "message-2", "message-3", "message-4");
+        // The LAST_RECORD_BEFORE_COMMIT header should not be configured on any
+        // exchange because autoCommitEnable=true
+        to.expectedHeaderValuesReceivedInAnyOrder(KafkaConstants.LAST_RECORD_BEFORE_COMMIT, null, null, null, null, null);
+        to.expectedHeaderReceived(propagatedHeaderKey, propagatedHeaderValue);
+
+        for (int k = 0; k < 5; k++) {
+            String msg = "message-" + k;
+            ProducerRecord<String, String> data = new ProducerRecord<>(TOPIC, "1", msg);
+            data.headers().add(new RecordHeader("CamelSkippedHeader", "skipped header value".getBytes()));
+            data.headers().add(new RecordHeader(propagatedHeaderKey, propagatedHeaderValue));
+            producer.send(data);
+        }
+
+        to.assertIsSatisfied(3000);
+
+        assertEquals(5, StreamSupport.stream(MockConsumerInterceptor.recordsCaptured.get(0).records(TOPIC).spliterator(), false)
+                .count());
+
+        Map<String, Object> headers = to.getExchanges().get(0).getIn().getHeaders();
+        assertFalse(headers.containsKey(skippedHeaderKey), "Should not receive skipped header");
+        assertTrue(headers.containsKey(propagatedHeaderKey), "Should receive propagated header");
+
+        // stop route
+        try {
+            context.getRouteController().stopAllRoutes();
+        } catch (Exception e) {
+            // ignore
+        }
+
+        // health-check should not be ready
+        res = HealthCheckHelper.invokeReadiness(context);
+        Optional<HealthCheck.Result> down = res.stream().filter(r -> r.getState().equals(HealthCheck.State.DOWN)).findFirst();
+        Assertions.assertTrue(down.isPresent());
+        String msg = down.get().getMessage().get();
+        Assertions.assertEquals("KafkaConsumer is not connected", msg);
+        Map<String, Object> map = down.get().getDetails();
+        Assertions.assertEquals(TOPIC, map.get("topic"));
+        Assertions.assertEquals("test-health-it", map.get("route.id"));
+    }
+
+    private static class MyKafkaHeaderDeserializer extends DefaultKafkaHeaderDeserializer {
+    }
+}
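
When a consumer task loses its connection, the check reports DOWN with the message "KafkaConsumer is not connected" and the detail keys populated in KafkaConsumerHealthCheck#doCall (bootstrap.servers, client.id, group.id, route.id, topic). A sketch of reading a DOWN result, mirroring the assertions in the test above (the class name KafkaReadinessLogger and the printing are illustrative only):

    import java.util.Collection;
    import java.util.Map;

    import org.apache.camel.CamelContext;
    import org.apache.camel.health.HealthCheck;
    import org.apache.camel.health.HealthCheckHelper;

    public class KafkaReadinessLogger {

        // Prints why the Kafka consumer readiness check is DOWN, if it is.
        public static void logDownDetails(CamelContext context) {
            Collection<HealthCheck.Result> results = HealthCheckHelper.invokeReadiness(context);
            results.stream()
                    .filter(r -> r.getState().equals(HealthCheck.State.DOWN))
                    .forEach(r -> {
                        Map<String, Object> details = r.getDetails();
                        // detail keys written by KafkaConsumerHealthCheck#doCall
                        System.out.println("message: " + r.getMessage().orElse(""));
                        System.out.println("topic: " + details.get("topic"));
                        System.out.println("route.id: " + details.get("route.id"));
                        System.out.println("bootstrap.servers: " + details.get("bootstrap.servers"));
                    });
        }
    }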