Posted to commits@spark.apache.org by we...@apache.org on 2016/10/11 07:36:03 UTC

spark git commit: [SPARK-17864][SQL] Mark data type APIs as stable (not DeveloperApi)

Repository: spark
Updated Branches:
  refs/heads/master 7388ad94d -> 3694ba48f


[SPARK-17864][SQL] Mark data type APIs as stable (not DeveloperApi)

## What changes were proposed in this pull request?
The data type APIs have not changed since Spark 1.3.0 and are ready for graduation. This patch marks them as stable APIs using the new InterfaceStability annotation.

This patch also looks at the various files in the catalyst module (the module as a whole, not just the catalyst package) and marks the remaining few classes appropriately as well.
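
For illustration, the three stability levels applied in this patch look roughly like the sketch below (the class names are hypothetical; the import and annotation syntax match what the diff uses):

    import org.apache.spark.annotation.InterfaceStability

    // Stable: graduated APIs, e.g. the data types in this patch.
    @InterfaceStability.Stable
    class MyStableApi

    // Evolving: still experimental, e.g. Encoder, Encoders and OutputMode here.
    @InterfaceStability.Evolving
    class MyEvolvingApi

    // Unstable: internals that may change at any time, e.g. Decimal here.
    @InterfaceStability.Unstable
    class MyInternalHelper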

## How was this patch tested?
This is an annotation-only change; there are no functional changes.

Author: Reynold Xin <rx...@databricks.com>

Closes #15426 from rxin/SPARK-17864.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3694ba48
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3694ba48
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3694ba48

Branch: refs/heads/master
Commit: 3694ba48f0db0f47baea4b005cdeef3f454b7329
Parents: 7388ad9
Author: Reynold Xin <rx...@databricks.com>
Authored: Tue Oct 11 15:35:52 2016 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Tue Oct 11 15:35:52 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/spark/sql/RowFactory.java   |  6 +++++
 .../apache/spark/sql/streaming/OutputMode.java  |  2 ++
 .../org/apache/spark/sql/types/DataTypes.java   |  5 +++++
 .../spark/sql/types/SQLUserDefinedType.java     |  2 ++
 .../apache/spark/sql/AnalysisException.scala    |  9 ++++----
 .../scala/org/apache/spark/sql/Encoder.scala    |  3 ++-
 .../scala/org/apache/spark/sql/Encoders.scala   |  3 ++-
 .../main/scala/org/apache/spark/sql/Row.scala   | 10 +++++++--
 .../spark/sql/types/AbstractDataType.scala      |  7 +++---
 .../org/apache/spark/sql/types/ArrayType.scala  | 14 ++++++++----
 .../org/apache/spark/sql/types/BinaryType.scala | 10 +++++----
 .../apache/spark/sql/types/BooleanType.scala    | 12 ++++++----
 .../org/apache/spark/sql/types/ByteType.scala   | 12 +++++++---
 .../spark/sql/types/CalendarIntervalType.scala  | 12 ++++++----
 .../org/apache/spark/sql/types/DataType.scala   | 11 +++++++---
 .../org/apache/spark/sql/types/DateType.scala   | 12 ++++++----
 .../org/apache/spark/sql/types/Decimal.scala    |  5 +++--
 .../apache/spark/sql/types/DecimalType.scala    | 14 ++++++++----
 .../org/apache/spark/sql/types/DoubleType.scala | 11 +++++++---
 .../org/apache/spark/sql/types/FloatType.scala  | 12 +++++++---
 .../apache/spark/sql/types/IntegerType.scala    | 11 +++++++---
 .../org/apache/spark/sql/types/LongType.scala   | 12 ++++++----
 .../org/apache/spark/sql/types/MapType.scala    | 10 +++++----
 .../org/apache/spark/sql/types/Metadata.scala   | 20 ++++++++++-------
 .../org/apache/spark/sql/types/NullType.scala   | 11 +++++++---
 .../org/apache/spark/sql/types/ShortType.scala  | 11 +++++++---
 .../org/apache/spark/sql/types/StringType.scala | 11 +++++++---
 .../apache/spark/sql/types/StructField.scala    |  5 +++++
 .../org/apache/spark/sql/types/StructType.scala | 23 ++++++++++++--------
 .../apache/spark/sql/types/TimestampType.scala  | 11 +++++++---
 .../spark/sql/types/UserDefinedType.scala       |  4 ----
 31 files changed, 207 insertions(+), 94 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
index 5ed60fe..2ce1fdc 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/RowFactory.java
@@ -17,16 +17,22 @@
 
 package org.apache.spark.sql;
 
+import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.catalyst.expressions.GenericRow;
 
 /**
  * A factory class used to construct {@link Row} objects.
+ *
+ * @since 1.3.0
  */
+@InterfaceStability.Stable
 public class RowFactory {
 
   /**
    * Create a {@link Row} from the given arguments. Position i in the argument list becomes
    * position i in the created {@link Row} object.
+   *
+   * @since 1.3.0
    */
   public static Row create(Object ... values) {
     return new GenericRow(values);

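For reference, RowFactory is the Java-friendly counterpart of the Scala Row companion; a minimal usage sketch (callable from Scala as well):

    import org.apache.spark.sql.RowFactory

    // Position i in the argument list becomes position i in the created Row.
    val row = RowFactory.create(Integer.valueOf(1), "alice")
    assert(row.getInt(0) == 1 && row.getString(1) == "alice")
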
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
index 41e2582..49a18df 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/streaming/OutputMode.java
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.streaming;
 
 import org.apache.spark.annotation.Experimental;
+import org.apache.spark.annotation.InterfaceStability;
 import org.apache.spark.sql.InternalOutputModes;
 
 /**
@@ -29,6 +30,7 @@ import org.apache.spark.sql.InternalOutputModes;
  * @since 2.0.0
  */
 @Experimental
+@InterfaceStability.Evolving
 public class OutputMode {
 
   /**

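As context for the Evolving tag, a short sketch of the OutputMode factory methods available around this commit (Spark 2.0.x):

    import org.apache.spark.sql.streaming.OutputMode

    val append   = OutputMode.Append()   // write only the new rows to the sink
    val complete = OutputMode.Complete() // write the full updated result table
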
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
index 747ab18..0f8570f 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/DataTypes.java
@@ -19,10 +19,15 @@ package org.apache.spark.sql.types;
 
 import java.util.*;
 
+import org.apache.spark.annotation.InterfaceStability;
+
 /**
  * To get/create a specific data type, users should use the singleton objects and factory methods
  * provided by this class.
+ *
+ * @since 1.3.0
  */
+@InterfaceStability.Stable
 public class DataTypes {
   /**
    * Gets the StringType object.

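A brief sketch of the DataTypes factory methods, usable from both Java and Scala:

    import java.util.Arrays
    import org.apache.spark.sql.types.DataTypes

    // Build struct<id: bigint, tags: array<string>> via the factory methods.
    val schema = DataTypes.createStructType(Arrays.asList(
      DataTypes.createStructField("id", DataTypes.LongType, false),
      DataTypes.createStructField("tags",
        DataTypes.createArrayType(DataTypes.StringType), true)))
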
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
index 110ed46..1290614 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/types/SQLUserDefinedType.java
@@ -20,6 +20,7 @@ package org.apache.spark.sql.types;
 import java.lang.annotation.*;
 
 import org.apache.spark.annotation.DeveloperApi;
+import org.apache.spark.annotation.InterfaceStability;
 
 /**
  * ::DeveloperApi::
@@ -30,6 +31,7 @@ import org.apache.spark.annotation.DeveloperApi;
 @DeveloperApi
 @Retention(RetentionPolicy.RUNTIME)
 @Target(ElementType.TYPE)
+@InterfaceStability.Evolving
 public @interface SQLUserDefinedType {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 6911843..f300330 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -17,17 +17,16 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 
 
-// TODO: don't swallow original stack trace if it exists
-
 /**
- * :: DeveloperApi ::
  * Thrown when a query fails to analyze, usually because the query itself is invalid.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class AnalysisException protected[sql] (
     val message: String,
     val line: Option[Int] = None,

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 501c130..b9f8c46 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import scala.annotation.implicitNotFound
 import scala.reflect.ClassTag
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.types._
 
 
@@ -67,6 +67,7 @@ import org.apache.spark.sql.types._
  * @since 1.6.0
  */
 @Experimental
+@InterfaceStability.Evolving
 @implicitNotFound("Unable to find encoder for type stored in a Dataset.  Primitive types " +
   "(Int, String, etc) and Product types (case classes) are supported by importing " +
   "spark.implicits._  Support for serializing other types will be added in future " +

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
index e72f67c..dc90659 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
@@ -22,7 +22,7 @@ import java.lang.reflect.Modifier
 import scala.reflect.{classTag, ClassTag}
 import scala.reflect.runtime.universe.TypeTag
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, InterfaceStability}
 import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
 import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder}
 import org.apache.spark.sql.catalyst.expressions.{BoundReference, Cast}
@@ -36,6 +36,7 @@ import org.apache.spark.sql.types._
  * @since 1.6.0
  */
 @Experimental
+@InterfaceStability.Evolving
 object Encoders {
 
   /**

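A short usage sketch of the still-experimental Encoders factory (the Person case class is hypothetical):

    import org.apache.spark.sql.Encoders

    case class Person(name: String, age: Int)

    val personEnc = Encoders.product[Person] // encoder derived for a case class
    val stringEnc = Encoders.STRING          // encoder for java.lang.String
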
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
index e16850e..344dcb9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
@@ -20,9 +20,14 @@ package org.apache.spark.sql
 import scala.collection.JavaConverters._
 import scala.util.hashing.MurmurHash3
 
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.expressions.GenericRow
 import org.apache.spark.sql.types.StructType
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object Row {
   /**
    * This method can be used to extract fields from a [[Row]] object in a pattern match. Example:
@@ -117,8 +122,9 @@ object Row {
  * }
  * }}}
  *
- * @group row
+ * @since 1.3.0
  */
+@InterfaceStability.Stable
 trait Row extends Serializable {
   /** Number of elements in the Row. */
   def size: Int = length
@@ -351,7 +357,7 @@ trait Row extends Serializable {
     }.toMap
   }
 
-  override def toString(): String = s"[${this.mkString(",")}]"
+  override def toString: String = s"[${this.mkString(",")}]"
 
   /**
    * Make a copy of the current [[Row]] object.

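A minimal sketch of constructing and pattern matching a Row through the companion object annotated above:

    import org.apache.spark.sql.Row

    val row = Row(1, "alice", true)
    row match {
      case Row(id: Int, name: String, active: Boolean) =>
        println(s"$id $name $active") // prints: 1 alice true
    }
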
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index 1981fd8..76dbb7c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.types
 
 import scala.reflect.runtime.universe.TypeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.expressions.Expression
 
 /**
@@ -131,10 +131,11 @@ protected[sql] abstract class AtomicType extends DataType {
 
 
 /**
- * :: DeveloperApi ::
  * Numeric data types.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 abstract class NumericType extends AtomicType {
   // Unfortunately we can't get this implicitly as that breaks Spark Serialization. In order for
   // implicitly[Numeric[JvmType]] to be valid, we have to change JvmType from a type variable to a

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index 82a03b0..5d70ef0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -21,9 +21,15 @@ import scala.math.Ordering
 
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.util.ArrayData
 
+/**
+ * Companion object for ArrayType.
+ *
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object ArrayType extends AbstractDataType {
   /** Construct an [[ArrayType]] object with the given element type; `containsNull` defaults to true. */
   def apply(elementType: DataType): ArrayType = ArrayType(elementType, containsNull = true)
@@ -37,9 +43,7 @@ object ArrayType extends AbstractDataType {
   override private[sql] def simpleString: String = "array"
 }
 
-
 /**
- * :: DeveloperApi ::
  * The data type for collections of multiple values.
  * Internally these are represented as columns that contain a ``scala.collection.Seq``.
  *
@@ -51,8 +55,10 @@ object ArrayType extends AbstractDataType {
  *
  * @param elementType The data type of values.
  * @param containsNull Indicates if values have `null` values
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataType {
 
   /** No-arg constructor for kryo. */

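A quick sketch of the two ArrayType constructors described above:

    import org.apache.spark.sql.types.{ArrayType, StringType}

    val withNulls = ArrayType(StringType)                       // containsNull defaults to true
    val noNulls   = ArrayType(StringType, containsNull = false) // null elements disallowed
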
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
index c40e140..a4a358a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
@@ -20,17 +20,16 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.sql.catalyst.util.TypeUtils
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Array[Byte]` values.
  * Please use the singleton [[DataTypes.BinaryType]].
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class BinaryType private() extends AtomicType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "BinaryType$" in byte code.
@@ -54,5 +53,8 @@ class BinaryType private() extends AtomicType {
   private[spark] override def asNullable: BinaryType = this
 }
 
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object BinaryType extends BinaryType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
index 2d8ee3d..059f89f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Boolean` values. Please use the singleton [[DataTypes.BooleanType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class BooleanType private() extends AtomicType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "BooleanType$" in byte code.
@@ -45,5 +46,8 @@ class BooleanType private() extends AtomicType {
   private[spark] override def asNullable: BooleanType = this
 }
 
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object BooleanType extends BooleanType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
index d37130e..bc6251f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Byte` values. Please use the singleton [[DataTypes.ByteType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class ByteType private() extends IntegralType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "ByteType$" in byte code.
@@ -48,4 +49,9 @@ class ByteType private() extends IntegralType {
   private[spark] override def asNullable: ByteType = this
 }
 
+
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object ByteType extends ByteType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
index 3565f52..e121044 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
@@ -17,19 +17,19 @@
 
 package org.apache.spark.sql.types
 
-import org.apache.spark.annotation.DeveloperApi
-
+import org.apache.spark.annotation.InterfaceStability
 
 /**
- * :: DeveloperApi ::
  * The data type representing calendar time intervals. The calendar time interval is stored
  * internally in two components: the number of months and the number of microseconds.
  *
  * Note that calendar intervals are not comparable.
  *
  * Please use the singleton [[DataTypes.CalendarIntervalType]].
+ *
+ * @since 1.5.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class CalendarIntervalType private() extends DataType {
 
   override def defaultSize: Int = 16
@@ -37,4 +37,8 @@ class CalendarIntervalType private() extends DataType {
   private[spark] override def asNullable: CalendarIntervalType = this
 }
 
+/**
+ * @since 1.5.0
+ */
+@InterfaceStability.Stable
 case object CalendarIntervalType extends CalendarIntervalType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index 4fc65cb..312585d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -22,15 +22,16 @@ import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.util.Utils
 
 /**
- * :: DeveloperApi ::
  * The base type of all Spark SQL data types.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 abstract class DataType extends AbstractDataType {
   /**
    * Enables matching against DataType for expressions:
@@ -94,6 +95,10 @@ abstract class DataType extends AbstractDataType {
 }
 
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object DataType {
 
   def fromJson(json: String): DataType = parseDataType(parse(json))

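A small sketch of round-tripping a type through the JSON form handled by the DataType companion:

    import org.apache.spark.sql.types.{DataType, StringType}

    val json   = StringType.json          // the JSON string "\"string\""
    val parsed = DataType.fromJson(json)  // back to the StringType singleton
    assert(parsed == StringType)
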
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
index 2c96623..8d0ecc0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
@@ -20,19 +20,20 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 
 /**
- * :: DeveloperApi ::
  * A date type, supporting "0001-01-01" through "9999-12-31".
  *
  * Please use the singleton [[DataTypes.DateType]].
  *
  * Internally, this is represented as the number of days from 1970-01-01.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class DateType private() extends AtomicType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "DateType$" in byte code.
@@ -51,5 +52,8 @@ class DateType private() extends AtomicType {
   private[spark] override def asNullable: DateType = this
 }
 
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object DateType extends DateType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 7085905..465fb83 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import java.lang.{Long => JLong}
 import java.math.{BigInteger, MathContext, RoundingMode}
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 
 /**
  * A mutable implementation of BigDecimal that can hold a Long if values are small enough.
@@ -30,6 +30,7 @@ import org.apache.spark.annotation.DeveloperApi
  * - If decimalVal is set, it represents the whole decimal value
  * - Otherwise, the decimal value is longVal / (10 ** _scale)
  */
+@InterfaceStability.Unstable
 final class Decimal extends Ordered[Decimal] with Serializable {
   import org.apache.spark.sql.types.Decimal._
 
@@ -185,7 +186,6 @@ final class Decimal extends Ordered[Decimal] with Serializable {
 
   override def toString: String = toBigDecimal.toString()
 
-  @DeveloperApi
   def toDebugString: String = {
     if (decimalVal.ne(null)) {
       s"Decimal(expanded,$decimalVal,$precision,$scale})"
@@ -380,6 +380,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
   }
 }
 
+@InterfaceStability.Unstable
 object Decimal {
   val ROUND_HALF_UP = BigDecimal.RoundingMode.HALF_UP
   val ROUND_HALF_EVEN = BigDecimal.RoundingMode.HALF_EVEN

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 6500875..d7ca0cb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -19,14 +19,13 @@ package org.apache.spark.sql.types
 
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.sql.catalyst.expressions.Expression
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `java.math.BigDecimal` values.
  * A Decimal that must have fixed precision (the maximum number of digits) and scale (the number
  * of digits to the right of the decimal point).
@@ -36,8 +35,10 @@ import org.apache.spark.sql.catalyst.expressions.Expression
  * The default precision and scale is (10, 0).
  *
  * Please use [[DataTypes.createDecimalType()]] to create a specific instance.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 case class DecimalType(precision: Int, scale: Int) extends FractionalType {
 
   if (scale > precision) {
@@ -101,7 +102,12 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
 }
 
 
-/** Extra factory methods and pattern matchers for Decimals */
+/**
+ * Extra factory methods and pattern matchers for Decimals.
+ *
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object DecimalType extends AbstractDataType {
   import scala.math.min
 

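A brief sketch of creating fixed-precision decimal types:

    import org.apache.spark.sql.types.DecimalType

    val d1 = DecimalType(38, 18) // precision 38, scale 18
    val d2 = DecimalType(10, 0)  // same as the documented default (10, 0)
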
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
index e553f65..c21ac0e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
@@ -21,15 +21,16 @@ import scala.math.{Fractional, Numeric, Ordering}
 import scala.math.Numeric.DoubleAsIfIntegral
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.util.Utils
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Double` values. Please use the singleton [[DataTypes.DoubleType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class DoubleType private() extends FractionalType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "DoubleType$" in byte code.
@@ -51,4 +52,8 @@ class DoubleType private() extends FractionalType {
   private[spark] override def asNullable: DoubleType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object DoubleType extends DoubleType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
index ae9aa9e..c5bf888 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
@@ -21,15 +21,16 @@ import scala.math.{Fractional, Numeric, Ordering}
 import scala.math.Numeric.FloatAsIfIntegral
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.util.Utils
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Float` values. Please use the singleton [[DataTypes.FloatType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class FloatType private() extends FractionalType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "FloatType$" in byte code.
@@ -51,4 +52,9 @@ class FloatType private() extends FractionalType {
   private[spark] override def asNullable: FloatType = this
 }
 
+
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object FloatType extends FloatType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
index 38a7b8e..724e59c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Int` values. Please use the singleton [[DataTypes.IntegerType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class IntegerType private() extends IntegralType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "IntegerType$" in byte code.
@@ -49,4 +50,8 @@ class IntegerType private() extends IntegralType {
   private[spark] override def asNullable: IntegerType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object IntegerType extends IntegerType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
index 88aff0c..42285a9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Long` values. Please use the singleton [[DataTypes.LongType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class LongType private() extends IntegralType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "LongType$" in byte code.
@@ -48,5 +49,8 @@ class LongType private() extends IntegralType {
   private[spark] override def asNullable: LongType = this
 }
 
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object LongType extends LongType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index 1789609..3a32aa4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -20,10 +20,9 @@ package org.apache.spark.sql.types
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 
 /**
- * :: DeveloperApi ::
  * The data type for Maps. Keys in a map are not allowed to have `null` values.
  *
  * Please use [[DataTypes.createMapType()]] to create a specific instance.
@@ -32,7 +31,7 @@ import org.apache.spark.annotation.DeveloperApi
  * @param valueType The data type of map values.
  * @param valueContainsNull Indicates if map values have `null` values.
  */
-@DeveloperApi
+@InterfaceStability.Stable
 case class MapType(
   keyType: DataType,
   valueType: DataType,
@@ -76,7 +75,10 @@ case class MapType(
   }
 }
 
-
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object MapType extends AbstractDataType {
 
   override private[sql] def defaultConcreteType: DataType = apply(NullType, NullType)

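A short sketch of the MapType constructors (the two-argument form defaults valueContainsNull to true):

    import org.apache.spark.sql.types.{IntegerType, MapType, StringType}

    val m1 = MapType(StringType, IntegerType)
    val m2 = MapType(StringType, IntegerType, valueContainsNull = false)
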
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
index 657bd86..3aa4bf6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
@@ -22,22 +22,22 @@ import scala.collection.mutable
 import org.json4s._
 import org.json4s.jackson.JsonMethods._
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 
 
 /**
- * :: DeveloperApi ::
- *
  * Metadata is a wrapper over Map[String, Any] that limits the value type to simple ones: Boolean,
  * Long, Double, String, Metadata, Array[Boolean], Array[Long], Array[Double], Array[String], and
  * Array[Metadata]. JSON is used for serialization.
  *
  * The default constructor is private. Users should use either [[MetadataBuilder]] or
- * [[Metadata.fromJson()]] to create Metadata instances.
+ * `Metadata.fromJson()` to create Metadata instances.
  *
  * @param map an immutable map that stores the data
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 sealed class Metadata private[types] (private[types] val map: Map[String, Any])
   extends Serializable {
 
@@ -114,6 +114,10 @@ sealed class Metadata private[types] (private[types] val map: Map[String, Any])
   private[sql] def jsonValue: JValue = Metadata.toJsonValue(this)
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object Metadata {
 
   private[this] val _empty = new Metadata(Map.empty)
@@ -218,11 +222,11 @@ object Metadata {
 }
 
 /**
- * :: DeveloperApi ::
- *
  * Builder for [[Metadata]]. If there is a key collision, the latter will overwrite the former.
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class MetadataBuilder {
 
   private val map: mutable.Map[String, Any] = mutable.Map.empty

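A minimal sketch of building Metadata with the builder annotated above:

    import org.apache.spark.sql.types.MetadataBuilder

    val meta = new MetadataBuilder()
      .putString("comment", "user id") // later puts overwrite earlier ones on key collision
      .putLong("maxLength", 32L)
      .build()
    assert(meta.getString("comment") == "user id")
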
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
index aa84115..bdf9a81 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
@@ -17,14 +17,15 @@
 
 package org.apache.spark.sql.types
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `NULL` values. Please use the singleton [[DataTypes.NullType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class NullType private() extends DataType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "NullType$" in byte code.
@@ -34,4 +35,8 @@ class NullType private() extends DataType {
   private[spark] override def asNullable: NullType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object NullType extends NullType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
index 486cf58..3fee299 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
@@ -20,14 +20,15 @@ package org.apache.spark.sql.types
 import scala.math.{Integral, Numeric, Ordering}
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 /**
- * :: DeveloperApi ::
  * The data type representing `Short` values. Please use the singleton [[DataTypes.ShortType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class ShortType private() extends IntegralType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "ShortType$" in byte code.
@@ -48,4 +49,8 @@ class ShortType private() extends IntegralType {
   private[spark] override def asNullable: ShortType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object ShortType extends ShortType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
index 44a2536..5d5a6f5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
@@ -20,15 +20,16 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.unsafe.types.UTF8String
 
 /**
- * :: DeveloperApi ::
  * The data type representing `String` values. Please use the singleton [[DataTypes.StringType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class StringType private() extends AtomicType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "StringType$" in byte code.
@@ -45,5 +46,9 @@ class StringType private() extends AtomicType {
   private[spark] override def asNullable: StringType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object StringType extends StringType
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
index cb8bf61..2c18fdc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.types
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
+import org.apache.spark.annotation.InterfaceStability
+
 /**
  * A field inside a StructType.
  * @param name The name of this field.
@@ -27,7 +29,10 @@ import org.json4s.JsonDSL._
  * @param nullable Indicates if values of this field can be `null` values.
  * @param metadata The metadata of this field. The metadata should be preserved during
  *                 transformation if the content of the column is not modified, e.g., in selection.
+ *
+ * @since 1.3.0
  */
+@InterfaceStability.Stable
 case class StructField(
     name: String,
     dataType: DataType,

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index dd4c88c..0205c13 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -23,14 +23,13 @@ import scala.util.Try
 import org.json4s.JsonDSL._
 
 import org.apache.spark.SparkException
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, InterpretedOrdering}
 import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, LegacyTypeStringParser}
 import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.util.Utils
 
 /**
- * :: DeveloperApi ::
  * A [[StructType]] object can be constructed by
  * {{{
  * StructType(fields: Seq[StructField])
@@ -90,8 +89,10 @@ import org.apache.spark.util.Utils
  * val row = Row(Row(1, 2, true))
  * // row: Row = [[1,2,true]]
  * }}}
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 case class StructType(fields: Array[StructField]) extends DataType with Seq[StructField] {
 
   /** No-arg constructor for kryo. */
@@ -138,7 +139,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
    *   .add("c", StringType)
    */
   def add(name: String, dataType: DataType): StructType = {
-    StructType(fields :+ new StructField(name, dataType, nullable = true, Metadata.empty))
+    StructType(fields :+ StructField(name, dataType, nullable = true, Metadata.empty))
   }
 
   /**
@@ -150,7 +151,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
    *   .add("c", StringType, true)
    */
   def add(name: String, dataType: DataType, nullable: Boolean): StructType = {
-    StructType(fields :+ new StructField(name, dataType, nullable, Metadata.empty))
+    StructType(fields :+ StructField(name, dataType, nullable, Metadata.empty))
   }
 
   /**
@@ -167,7 +168,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
       dataType: DataType,
       nullable: Boolean,
       metadata: Metadata): StructType = {
-    StructType(fields :+ new StructField(name, dataType, nullable, metadata))
+    StructType(fields :+ StructField(name, dataType, nullable, metadata))
   }
 
   /**
@@ -347,7 +348,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
   private[sql] override def simpleString(maxNumberFields: Int): String = {
     val builder = new StringBuilder
     val fieldTypes = fields.take(maxNumberFields).map {
-      case f => s"${f.name}: ${f.dataType.simpleString(maxNumberFields)}"
+      f => s"${f.name}: ${f.dataType.simpleString(maxNumberFields)}"
     }
     builder.append("struct<")
     builder.append(fieldTypes.mkString(", "))
@@ -393,6 +394,10 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
     InterpretedOrdering.forSchema(this.fields.map(_.dataType))
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 object StructType extends AbstractDataType {
 
   /**
@@ -469,7 +474,7 @@ object StructType extends AbstractDataType {
                   nullable = leftNullable || rightNullable)
               }
               .orElse {
-                optionalMeta.putBoolean(metadataKeyForOptionalField, true)
+                optionalMeta.putBoolean(metadataKeyForOptionalField, value = true)
                 Some(leftField.copy(metadata = optionalMeta.build()))
               }
               .foreach(newFields += _)
@@ -479,7 +484,7 @@ object StructType extends AbstractDataType {
         rightFields
           .filterNot(f => leftMapped.get(f.name).nonEmpty)
           .foreach { f =>
-            optionalMeta.putBoolean(metadataKeyForOptionalField, true)
+            optionalMeta.putBoolean(metadataKeyForOptionalField, value = true)
             newFields += f.copy(metadata = optionalMeta.build())
           }
 

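A compact sketch of the add-based StructType construction exercised in this diff:

    import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

    val schema = new StructType()
      .add("a", IntegerType)                  // nullable by default
      .add("b", StringType, nullable = false)

    assert(schema.fieldNames.sameElements(Array("a", "b")))
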
http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
index 2be9b2d..4540d83 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
@@ -20,16 +20,17 @@ package org.apache.spark.sql.types
 import scala.math.Ordering
 import scala.reflect.runtime.universe.typeTag
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 
 
 /**
- * :: DeveloperApi ::
  * The data type representing `java.sql.Timestamp` values.
  * Please use the singleton [[DataTypes.TimestampType]].
+ *
+ * @since 1.3.0
  */
-@DeveloperApi
+@InterfaceStability.Stable
 class TimestampType private() extends AtomicType {
   // The companion object and this class are separated so the companion object also subclasses
   // this type. Otherwise, the companion object would be of type "TimestampType$" in byte code.
@@ -48,4 +49,8 @@ class TimestampType private() extends AtomicType {
   private[spark] override def asNullable: TimestampType = this
 }
 
+/**
+ * @since 1.3.0
+ */
+@InterfaceStability.Stable
 case object TimestampType extends TimestampType

http://git-wip-us.apache.org/repos/asf/spark/blob/3694ba48/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
index 8946313..c33219c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UserDefinedType.scala
@@ -22,8 +22,6 @@ import java.util.Objects
 import org.json4s.JsonAST.JValue
 import org.json4s.JsonDSL._
 
-import org.apache.spark.annotation.DeveloperApi
-
 /**
  * The data type for User Defined Types (UDTs).
  *
@@ -96,12 +94,10 @@ abstract class UserDefinedType[UserType >: Null] extends DataType with Serializa
 }
 
 /**
- * :: DeveloperApi ::
  * The user defined type in Python.
  *
  * Note: This can only be accessed via a Python UDF, or as a serialized object.
  */
-@DeveloperApi
 private[sql] class PythonUserDefinedType(
     val sqlType: DataType,
     override val pyUDT: String,

