Posted to reviews@spark.apache.org by tdas <gi...@git.apache.org> on 2018/04/02 20:44:37 UTC

[GitHub] spark pull request #20951: [SPARK-23099][SS] Migrate foreach sink to DataSou...

Github user tdas commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20951#discussion_r178645279
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ForeachSink.scala ---
    @@ -17,52 +17,81 @@
     
     package org.apache.spark.sql.execution.streaming
     
    -import org.apache.spark.TaskContext
    -import org.apache.spark.sql.{DataFrame, Encoder, ForeachWriter}
    +import org.apache.spark.sql.{Encoder, ForeachWriter, SparkSession}
    +import org.apache.spark.sql.catalyst.InternalRow
    +import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
     import org.apache.spark.sql.catalyst.encoders.encoderFor
    +import org.apache.spark.sql.sources.v2.{DataSourceOptions, StreamWriteSupport}
    +import org.apache.spark.sql.sources.v2.writer.{DataWriter, DataWriterFactory, SupportsWriteInternalRow, WriterCommitMessage}
    +import org.apache.spark.sql.sources.v2.writer.streaming.StreamWriter
    +import org.apache.spark.sql.streaming.OutputMode
    +import org.apache.spark.sql.types.StructType
     
    -/**
    - * A [[Sink]] that forwards all data into [[ForeachWriter]] according to the contract defined by
    - * [[ForeachWriter]].
    - *
    - * @param writer The [[ForeachWriter]] to process all data.
    - * @tparam T The expected type of the sink.
    - */
    -class ForeachSink[T : Encoder](writer: ForeachWriter[T]) extends Sink with Serializable {
    +case class ForeachWriterProvider[T: Encoder](writer: ForeachWriter[T]) extends StreamWriteSupport {
    --- End diff --
    
    Rename the file accordingly, and add docs.
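    For reference, the requested docs could follow the ScalaDoc that this diff removes from ForeachSink. Below is a minimal sketch only, not the PR's actual code: it assumes the Spark 2.3-era DataSourceV2 streaming writer API (StreamWriteSupport.createStreamWriter, StreamWriter, SupportsWriteInternalRow), and ForeachWriterFactory is a hypothetical serializable helper that is not part of the quoted diff.
    
        /**
         * A [[StreamWriteSupport]] that forwards all data into a [[ForeachWriter]]
         * according to the contract defined by [[ForeachWriter]].
         *
         * @param writer The [[ForeachWriter]] to process all data.
         * @tparam T The expected type of the sink.
         */
        case class ForeachWriterProvider[T: Encoder](writer: ForeachWriter[T])
          extends StreamWriteSupport {
    
          override def createStreamWriter(
              queryId: String,
              schema: StructType,
              mode: OutputMode,
              options: DataSourceOptions): StreamWriter = {
            new StreamWriter with SupportsWriteInternalRow {
              // The foreach sink keeps no driver-side state, so commit/abort are no-ops.
              override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
              override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {}
    
              override def createInternalRowWriterFactory(): DataWriterFactory[InternalRow] = {
                // Bind the encoder to the write schema so executors can turn
                // InternalRows back into T before calling the user's ForeachWriter.
                val encoder = encoderFor[T].resolveAndBind(
                  schema.toAttributes,
                  SparkSession.getActiveSession.get.sessionState.analyzer)
                // Hypothetical serializable factory that opens the ForeachWriter
                // per partition and epoch on the executors (not shown in the diff).
                ForeachWriterFactory(writer, encoder)
              }
            }
          }
        }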


---
