Posted to commits@kafka.apache.org by gu...@apache.org on 2018/12/22 06:32:35 UTC

[kafka] branch trunk updated: MINOR: Increase throughput for VerifiableProducer in test (#6060)

This is an automated email from the ASF dual-hosted git repository.

guozhang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 639427a  MINOR: Increase throughput for VerifiableProducer in test (#6060)
639427a is described below

commit 639427a38f0760d4a41fd477dc37f249d92a2020
Author: Bill Bejeck <bb...@gmail.com>
AuthorDate: Sat Dec 22 01:32:19 2018 -0500

    MINOR: Increase throughput for VerifiableProducer in test (#6060)
    
    Previous PR #6043 reduced the throughput for VerifiableProducer in the base class, but streams_standby_replica_test needs a higher throughput for the consumer to complete verification within 60 seconds.
    
    Updated the system test and kicked off the branch builder with 25 repeats: https://jenkins.confluent.io/job/system-test-kafka-branch-builder/2201/
    
    Reviewers: Guozhang Wang <wa...@gmail.com>
---
 tests/kafkatest/tests/streams/base_streams_test.py            | 4 ++--
 tests/kafkatest/tests/streams/streams_standby_replica_test.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)
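
As a rough illustration of the 60 second constraint described in the commit
message above, compare the time needed just to produce the data at the old and
new rates. This is a hypothetical back-of-the-envelope check: the message count
below is an assumed figure, and only the two throughput values (1000 and 15000
messages/sec) come from this patch.

    # Hypothetical numbers; num_messages is assumed, not taken from the test.
    num_messages = 100000

    seconds_at_old_rate = num_messages / 1000.0    # 100.0 s -- producing alone exceeds the 60 s window
    seconds_at_new_rate = num_messages / 15000.0   # ~6.7 s  -- leaves the consumer most of the window

    print(seconds_at_old_rate, seconds_at_new_rate)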

diff --git a/tests/kafkatest/tests/streams/base_streams_test.py b/tests/kafkatest/tests/streams/base_streams_test.py
index 6e005dd..9a9704e 100644
--- a/tests/kafkatest/tests/streams/base_streams_test.py
+++ b/tests/kafkatest/tests/streams/base_streams_test.py
@@ -38,14 +38,14 @@ class BaseStreamsTest(KafkaTest):
                                   client_id,
                                   max_messages=num_messages)
 
-    def get_producer(self, topic, num_messages, repeating_keys=None):
+    def get_producer(self, topic, num_messages, throughput=1000, repeating_keys=None):
         return VerifiableProducer(self.test_context,
                                   1,
                                   self.kafka,
                                   topic,
                                   max_messages=num_messages,
                                   acks=1,
-                                  throughput=1000,
+                                  throughput=throughput,
                                   repeating_keys=repeating_keys)
 
     def assert_produce_consume(self,
diff --git a/tests/kafkatest/tests/streams/streams_standby_replica_test.py b/tests/kafkatest/tests/streams/streams_standby_replica_test.py
index 416a110..8425e14 100644
--- a/tests/kafkatest/tests/streams/streams_standby_replica_test.py
+++ b/tests/kafkatest/tests/streams/streams_standby_replica_test.py
@@ -46,7 +46,7 @@ class StreamsStandbyTask(BaseStreamsTest):
                                                                                     self.streams_sink_topic_1,
                                                                                     self.streams_sink_topic_2))
 
-        producer = self.get_producer(self.streams_source_topic, self.num_messages, repeating_keys=6)
+        producer = self.get_producer(self.streams_source_topic, self.num_messages, throughput=15000, repeating_keys=6)
         producer.start()
 
         processor_1 = StreamsStandbyTaskService(self.test_context, self.kafka, configs)
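
Taken together, the two hunks leave the helper and its caller looking roughly
as follows. This is a sketch reconstructed from the diff above; the import
paths and the enclosing class declaration are assumptions based on the
kafkatest package layout and are not part of this patch.

    # Assumed imports (not shown in the patch).
    from kafkatest.services.verifiable_producer import VerifiableProducer
    from kafkatest.tests.kafka_test import KafkaTest


    class BaseStreamsTest(KafkaTest):

        def get_producer(self, topic, num_messages, throughput=1000, repeating_keys=None):
            # The default stays at the conservative 1000 msgs/sec from PR #6043;
            # individual tests can now override it per call.
            return VerifiableProducer(self.test_context,
                                      1,
                                      self.kafka,
                                      topic,
                                      max_messages=num_messages,
                                      acks=1,
                                      throughput=throughput,
                                      repeating_keys=repeating_keys)

    # streams_standby_replica_test.py opts into the higher rate so the consumer
    # can finish verification within 60 seconds:
    #
    #     producer = self.get_producer(self.streams_source_topic, self.num_messages,
    #                                  throughput=15000, repeating_keys=6)
    #     producer.start()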