You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kafka.apache.org by gu...@apache.org on 2017/11/03 15:51:47 UTC

kafka git commit: MINOR: Update docs for new version

Repository: kafka
Updated Branches:
  refs/heads/trunk 4fac83ba1 -> 487436b1a


MINOR: Update docs for new version

1. Update the Streams hello world examples with the new API.
2. Update the version references in various places.
3. Update version templates to 1.1.x.

Author: Guozhang Wang <wa...@gmail.com>

Reviewers: Ismael Juma <is...@juma.me.uk>, Damian Guy <da...@gmail.com>, Derrick Or <de...@gmail.com>

Closes #4169 from guozhangwang/KMINOR-streams-docs


Project: http://git-wip-us.apache.org/repos/asf/kafka/repo
Commit: http://git-wip-us.apache.org/repos/asf/kafka/commit/487436b1
Tree: http://git-wip-us.apache.org/repos/asf/kafka/tree/487436b1
Diff: http://git-wip-us.apache.org/repos/asf/kafka/diff/487436b1

Branch: refs/heads/trunk
Commit: 487436b1a46b728904e543456c8bcc0d3ceea55a
Parents: 4fac83b
Author: Guozhang Wang <wa...@gmail.com>
Authored: Fri Nov 3 08:51:44 2017 -0700
Committer: Guozhang Wang <wa...@gmail.com>
Committed: Fri Nov 3 08:51:44 2017 -0700

----------------------------------------------------------------------
 docs/documentation.html |  8 ++++----
 docs/js/templateData.js |  6 +++---
 docs/streams/index.html | 47 +++++++++++++++++++++++++-------------------
 3 files changed, 34 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kafka/blob/487436b1/docs/documentation.html
----------------------------------------------------------------------
diff --git a/docs/documentation.html b/docs/documentation.html
index 7f297cc..0536535 100644
--- a/docs/documentation.html
+++ b/docs/documentation.html
@@ -26,8 +26,8 @@
 	<div class="right">
 		<!--#include virtual="../includes/_docs_banner.htm" -->
     <h1>Documentation</h1>
-    <h3>Kafka 0.11.0 Documentation</h3>
-    Prior releases: <a href="/07/documentation.html">0.7.x</a>, <a href="/08/documentation.html">0.8.0</a>, <a href="/081/documentation.html">0.8.1.X</a>, <a href="/082/documentation.html">0.8.2.X</a>, <a href="/090/documentation.html">0.9.0.X</a>, <a href="/0100/documentation.html">0.10.0.X</a>, <a href="/0101/documentation.html">0.10.1.X</a>, <a href="/0102/documentation.html">0.10.2.X</a>.
+    <h3>Kafka 1.1 Documentation</h3>
+    Prior releases: <a href="/07/documentation.html">0.7.x</a>, <a href="/08/documentation.html">0.8.0</a>, <a href="/081/documentation.html">0.8.1.X</a>, <a href="/082/documentation.html">0.8.2.X</a>, <a href="/090/documentation.html">0.9.0.X</a>, <a href="/0100/documentation.html">0.10.0.X</a>, <a href="/0101/documentation.html">0.10.1.X</a>, <a href="/0102/documentation.html">0.10.2.X</a>, <a href="/0110/documentation.html">0.11.0.X</a>, <a href="/10/documentation.html">1.0.X</a>.
 
     <!--#include virtual="toc.html" -->
 
@@ -69,7 +69,7 @@
     <h2><a id="connect" href="#connect">8. Kafka Connect</a></h2>
     <!--#include virtual="connect.html" -->
 
-    <h2><a id="streams" href="/0110/documentation/streams">9. Kafka Streams</a></h2>
+    <h2><a id="streams" href="/11/documentation/streams">9. Kafka Streams</a></h2>
     <p>
         Kafka Streams is a client library for processing and analyzing data stored in Kafka. It builds upon important stream processing concepts such as properly distinguishing between event time and processing time, windowing support, exactly-once processing semantics and simple yet efficient management of application state.
     </p>
@@ -77,7 +77,7 @@
         Kafka Streams has a <b>low barrier to entry</b>: You can quickly write and run a small-scale proof-of-concept on a single machine; and you only need to run additional instances of your application on multiple machines to scale up to high-volume production workloads. Kafka Streams transparently handles the load balancing of multiple instances of the same application by leveraging Kafka's parallelism model.
     </p>
 
-    <p>Learn More about Kafka Streams read <a href="/0110/documentation/streams">this</a> Section.</p>
+    <p>To learn more about Kafka Streams, read <a href="/11/documentation/streams">this</a> section.</p>
 
 <!--#include virtual="../includes/_footer.htm" -->
 <!--#include virtual="../includes/_docs_footer.htm" -->

http://git-wip-us.apache.org/repos/asf/kafka/blob/487436b1/docs/js/templateData.js
----------------------------------------------------------------------
diff --git a/docs/js/templateData.js b/docs/js/templateData.js
index 4b57914..28f0eec 100644
--- a/docs/js/templateData.js
+++ b/docs/js/templateData.js
@@ -17,8 +17,8 @@ limitations under the License.
 
 // Define variables for doc templates
 var context={
-    "version": "100",
-    "dotVersion": "1.0",
-    "fullDotVersion": "1.0.0",
+    "version": "11",
+    "dotVersion": "1.1",
+    "fullDotVersion": "1.1.0",
     "scalaVersion": "2.11"
 };

http://git-wip-us.apache.org/repos/asf/kafka/blob/487436b1/docs/streams/index.html
----------------------------------------------------------------------
diff --git a/docs/streams/index.html b/docs/streams/index.html
index c3a2762..ab72c87 100644
--- a/docs/streams/index.html
+++ b/docs/streams/index.html
@@ -152,10 +152,12 @@
                <pre class="brush: java;">
                    import org.apache.kafka.common.serialization.Serdes;
                    import org.apache.kafka.streams.KafkaStreams;
+                   import org.apache.kafka.streams.StreamsBuilder;
                    import org.apache.kafka.streams.StreamsConfig;
-                   import org.apache.kafka.streams.kstream.KStream;
-                   import org.apache.kafka.streams.kstream.KStreamBuilder;
-                   import org.apache.kafka.streams.kstream.KTable;
+                   import org.apache.kafka.streams.Topology;
+                   import org.apache.kafka.streams.kstream.Materialized;
+                   import org.apache.kafka.streams.kstream.Produced;
+                   import org.apache.kafka.streams.state.KeyValueStore;
        
                    import java.util.Arrays;
                    import java.util.Properties;
@@ -169,15 +171,15 @@
                            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
                            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        
-                           KStreamBuilder builder = new KStreamBuilder();
+                           StreamsBuilder builder = new StreamsBuilder();
                            KStream&lt;String, String&gt; textLines = builder.stream("TextLinesTopic");
                            KTable&lt;String, Long&gt; wordCounts = textLines
                                .flatMapValues(textLine -> Arrays.asList(textLine.toLowerCase().split("\\W+")))
                                .groupBy((key, word) -> word)
-                               .count("Counts");
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic");
+                               .count(Materialized.&lt;String, Long, KeyValueStore&lt;Bytes, byte[]&gt;&gt;as("counts-store"));
+                           wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));
        
-                           KafkaStreams streams = new KafkaStreams(builder, config);
+                           KafkaStreams streams = new KafkaStreams(builder.build(), config);
                            streams.start();
                        }
        
@@ -189,13 +191,15 @@
                <pre class="brush: java;">
                    import org.apache.kafka.common.serialization.Serdes;
                    import org.apache.kafka.streams.KafkaStreams;
+                   import org.apache.kafka.streams.StreamsBuilder;
                    import org.apache.kafka.streams.StreamsConfig;
-                   import org.apache.kafka.streams.kstream.KStream;
-                   import org.apache.kafka.streams.kstream.KStreamBuilder;
-                   import org.apache.kafka.streams.kstream.KTable;
+                   import org.apache.kafka.streams.Topology;
                    import org.apache.kafka.streams.kstream.KeyValueMapper;
+                   import org.apache.kafka.streams.kstream.Materialized;
+                   import org.apache.kafka.streams.kstream.Produced;
                    import org.apache.kafka.streams.kstream.ValueMapper;
-       
+                   import org.apache.kafka.streams.state.KeyValueStore;
+
                    import java.util.Arrays;
                    import java.util.Properties;
        
@@ -208,7 +212,7 @@
                            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
                            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        
-                           KStreamBuilder builder = new KStreamBuilder();
+                           StreamsBuilder builder = new StreamsBuilder();
                            KStream&lt;String, String&gt; textLines = builder.stream("TextLinesTopic");
                            KTable&lt;String, Long&gt; wordCounts = textLines
                                .flatMapValues(new ValueMapper&lt;String, Iterable&lt;String&gt;&gt;() {
@@ -223,10 +227,12 @@
                                        return word;
                                    }
                                })
-                               .count("Counts");
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic");
+                               .count(Materialized.&lt;String, Long, KeyValueStore&lt;Bytes, byte[]&gt;&gt;as("counts-store"));
+
+
+                           wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));
        
-                           KafkaStreams streams = new KafkaStreams(builder, config);
+                           KafkaStreams streams = new KafkaStreams(builder.build(), config);
                            streams.start();
                        }
        
@@ -242,7 +248,8 @@
        
                    import org.apache.kafka.common.serialization._
                    import org.apache.kafka.streams._
-                   import org.apache.kafka.streams.kstream.{KStream, KStreamBuilder, KTable}
+                   import org.apache.kafka.streams.kstream.{KeyValueMapper, Materialized, Produced, ValueMapper}
+                   import org.apache.kafka.streams.state.KeyValueStore
        
                    import scala.collection.JavaConverters.asJavaIterableConverter
        
@@ -258,15 +265,15 @@
                                p
                            }
        
-                           val builder: KStreamBuilder = new KStreamBuilder()
+                           val builder: StreamsBuilder = new StreamsBuilder()
                            val textLines: KStream[String, String] = builder.stream("TextLinesTopic")
                            val wordCounts: KTable[String, Long] = textLines
                                .flatMapValues(textLine => textLine.toLowerCase.split("\\W+").toIterable.asJava)
                                .groupBy((_, word) => word)
-                               .count("Counts")
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic")
+                               .count(Materialized.as("counts-store").asInstanceOf[Materialized[String, Long, KeyValueStore[Bytes, Array[Byte]]]])
+                               wordCounts.toStream().to("WordsWithCountsTopic", Produced.`with`(Serdes.String(), Serdes.Long()))
        
-                           val streams: KafkaStreams = new KafkaStreams(builder, config)
+                           val streams: KafkaStreams = new KafkaStreams(builder.build(), config)
                            streams.start()
        
                            Runtime.getRuntime.addShutdownHook(new Thread(() => {