Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/14 09:05:49 UTC

[4/5] git commit: Fixed loose ends in docs.

Fixed loose ends in docs.


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/f8bd828c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/f8bd828c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/f8bd828c

Branch: refs/heads/master
Commit: f8bd828c7ccf1ff69bc35bf95d07183cb35a7c72
Parents: f8e239e
Author: Tathagata Das <ta...@gmail.com>
Authored: Tue Jan 14 00:03:46 2014 -0800
Committer: Tathagata Das <ta...@gmail.com>
Committed: Tue Jan 14 00:03:46 2014 -0800

----------------------------------------------------------------------
 docs/streaming-programming-guide.md                              | 4 ++--
 .../main/scala/org/apache/spark/streaming/dstream/DStream.scala  | 2 --
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/f8bd828c/docs/streaming-programming-guide.md
----------------------------------------------------------------------
diff --git a/docs/streaming-programming-guide.md b/docs/streaming-programming-guide.md
index 1495af2..07c4c55 100644
--- a/docs/streaming-programming-guide.md
+++ b/docs/streaming-programming-guide.md
@@ -48,10 +48,10 @@ ssc.textFileStream(directory)    // Creates a stream that monitors and processes
 ssc.socketStream(hostname, port) // Creates a stream that uses a TCP socket to read data from hostname:port
 {% endhighlight %}
 
-The core Spark Streaming API provides input streams for files, sockets, Akka actors. Additional functionality for Kafka, Flume, ZeroMQ, Twitter, etc. can be imported by adding the right dependencies as explained in the [linking](#linking-with-spark-streaming) section.
+The core Spark Streaming API provides input streams for files, sockets, and Akka actors. Additional functionality for Kafka, Flume, ZeroMQ, Twitter, etc. can be imported by adding the right dependencies as explained in the [linking](#linking-with-spark-streaming) section.
 
 # DStream Operations
-Data received from the input streams can be processed using _DStream operations_. There are two kinds of operations - _transformations_ and _output operations_. Similar to RDD transformations, DStream transformations operate on one or more DStreams to create new DStreams with transformed data. After applying a sequence of transformations to the input streams, output operations need to called, which writes data out to an external data sink like a file system or a database.
+Data received from the input streams can be processed using _DStream operations_. There are two kinds of operations - _transformations_ and _output operations_. Similar to RDD transformations, DStream transformations operate on one or more DStreams to create new DStreams with transformed data. After applying a sequence of transformations to the input streams, output operations need to be called, which write data out to an external data sink like a file system or a database.
 
 ## Transformations
 

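For context on the guide text touched above, the following is a minimal sketch (not part of this commit) of a DStream pipeline: an input stream is created from a TCP socket, transformations produce new DStreams, and an output operation writes results out. The master string, hostname, port, and batch interval are illustrative assumptions.

import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.StreamingContext._

object StreamingWordCount {
  def main(args: Array[String]) {
    // Batch interval of 1 second; master and app name are illustrative
    val ssc = new StreamingContext("local[2]", "StreamingWordCount", Seconds(1))

    // Input stream: lines of text read over a TCP socket (hostname:port assumed)
    val lines = ssc.socketTextStream("localhost", 9999)

    // Transformations: each step creates a new DStream
    val words = lines.flatMap(_.split(" "))
    val counts = words.map(word => (word, 1)).reduceByKey(_ + _)

    // Output operation: prints the first elements of each batch's result
    counts.print()

    ssc.start()             // start receiving and processing data
    ssc.awaitTermination()  // block until the streaming computation stops
  }
}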
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/f8bd828c/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
index 844316a..71a4c5c 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
@@ -54,8 +54,6 @@ import org.apache.spark.streaming.Duration
  *  - A list of other DStreams that the DStream depends on
  *  - A time interval at which the DStream generates an RDD
  *  - A function that is used to generate an RDD after each time interval
- *
- * There are two types of DStream operations - __transformations__
  */
 
 abstract class DStream[T: ClassTag] (
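To illustrate the three properties the scaladoc above lists (dependencies, slide interval, and the RDD-generating function), here is a hypothetical pass-through DStream sketch. It is not part of this commit; the class name is made up, it assumes these members are accessible from user code, and real subclasses live inside Spark's streaming package and reuse internal caching rather than delegating straight to the parent's compute.

import scala.reflect.ClassTag
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Duration, Time}
import org.apache.spark.streaming.dstream.DStream

// Hypothetical sketch: forwards each batch of its parent unchanged
class PassThroughDStream[T: ClassTag](parent: DStream[T])
  extends DStream[T](parent.context) {

  // The list of other DStreams that this DStream depends on
  override def dependencies: List[DStream[_]] = List(parent)

  // The time interval at which this DStream generates an RDD
  override def slideDuration: Duration = parent.slideDuration

  // The function used to generate an RDD for each time interval
  // (simplified: delegates directly to the parent for illustration)
  override def compute(validTime: Time): Option[RDD[T]] =
    parent.compute(validTime)
}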