Posted to commits@spark.apache.org by fe...@apache.org on 2018/03/05 17:30:54 UTC

[1/3] spark git commit: [SPARK-22430][R][DOCS] Unknown tag warnings when building R docs with Roxygen 6.0.1

Repository: spark
Updated Branches:
  refs/heads/master 947b4e6f0 -> 4586eada4
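
Context for the change: Roxygen 6.0.1 reports these "@export" tags as unknown tags
when the SparkR docs are built, and SparkR's NAMESPACE file is maintained by hand,
so the tags were not driving any exports and can simply be dropped from every
documentation block. A representative before/after, taken verbatim from the first
hunk of R/pkg/R/mllib_tree.R below:

  # Before -- roxygen 6.0.1 warns on the @export tag in this block
  #' S4 class that represents a GBTRegressionModel
  #'
  #' @param jobj a Java object reference to the backing Scala GBTRegressionModel
  #' @export
  #' @note GBTRegressionModel since 2.1.0
  setClass("GBTRegressionModel", representation(jobj = "jobj"))

  # After -- only the tag line is removed; the class and the rest of the docs are unchanged
  #' S4 class that represents a GBTRegressionModel
  #'
  #' @param jobj a Java object reference to the backing Scala GBTRegressionModel
  #' @note GBTRegressionModel since 2.1.0
  setClass("GBTRegressionModel", representation(jobj = "jobj"))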


http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_tree.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_tree.R b/R/pkg/R/mllib_tree.R
index 4e5ddf2..6769be0 100644
--- a/R/pkg/R/mllib_tree.R
+++ b/R/pkg/R/mllib_tree.R
@@ -20,42 +20,36 @@
 #' S4 class that represents a GBTRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala GBTRegressionModel
-#' @export
 #' @note GBTRegressionModel since 2.1.0
 setClass("GBTRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a GBTClassificationModel
 #'
 #' @param jobj a Java object reference to the backing Scala GBTClassificationModel
-#' @export
 #' @note GBTClassificationModel since 2.1.0
 setClass("GBTClassificationModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a RandomForestRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala RandomForestRegressionModel
-#' @export
 #' @note RandomForestRegressionModel since 2.1.0
 setClass("RandomForestRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a RandomForestClassificationModel
 #'
 #' @param jobj a Java object reference to the backing Scala RandomForestClassificationModel
-#' @export
 #' @note RandomForestClassificationModel since 2.1.0
 setClass("RandomForestClassificationModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a DecisionTreeRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala DecisionTreeRegressionModel
-#' @export
 #' @note DecisionTreeRegressionModel since 2.3.0
 setClass("DecisionTreeRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a DecisionTreeClassificationModel
 #'
 #' @param jobj a Java object reference to the backing Scala DecisionTreeClassificationModel
-#' @export
 #' @note DecisionTreeClassificationModel since 2.3.0
 setClass("DecisionTreeClassificationModel", representation(jobj = "jobj"))
 
@@ -179,7 +173,6 @@ print.summary.decisionTree <- function(x) {
 #' @return \code{spark.gbt} returns a fitted Gradient Boosted Tree model.
 #' @rdname spark.gbt
 #' @name spark.gbt
-#' @export
 #' @examples
 #' \dontrun{
 #' # fit a Gradient Boosted Tree Regression Model
@@ -261,7 +254,6 @@ setMethod("spark.gbt", signature(data = "SparkDataFrame", formula = "formula"),
 #'         \code{numTrees} (number of trees), and \code{treeWeights} (tree weights).
 #' @rdname spark.gbt
 #' @aliases summary,GBTRegressionModel-method
-#' @export
 #' @note summary(GBTRegressionModel) since 2.1.0
 setMethod("summary", signature(object = "GBTRegressionModel"),
           function(object) {
@@ -275,7 +267,6 @@ setMethod("summary", signature(object = "GBTRegressionModel"),
 #' @param x summary object of Gradient Boosted Tree regression model or classification model
 #'          returned by \code{summary}.
 #' @rdname spark.gbt
-#' @export
 #' @note print.summary.GBTRegressionModel since 2.1.0
 print.summary.GBTRegressionModel <- function(x, ...) {
   print.summary.treeEnsemble(x)
@@ -285,7 +276,6 @@ print.summary.GBTRegressionModel <- function(x, ...) {
 
 #' @rdname spark.gbt
 #' @aliases summary,GBTClassificationModel-method
-#' @export
 #' @note summary(GBTClassificationModel) since 2.1.0
 setMethod("summary", signature(object = "GBTClassificationModel"),
           function(object) {
@@ -297,7 +287,6 @@ setMethod("summary", signature(object = "GBTClassificationModel"),
 #  Prints the summary of Gradient Boosted Tree Classification Model
 
 #' @rdname spark.gbt
-#' @export
 #' @note print.summary.GBTClassificationModel since 2.1.0
 print.summary.GBTClassificationModel <- function(x, ...) {
   print.summary.treeEnsemble(x)
@@ -310,7 +299,6 @@ print.summary.GBTClassificationModel <- function(x, ...) {
 #'         "prediction".
 #' @rdname spark.gbt
 #' @aliases predict,GBTRegressionModel-method
-#' @export
 #' @note predict(GBTRegressionModel) since 2.1.0
 setMethod("predict", signature(object = "GBTRegressionModel"),
           function(object, newData) {
@@ -319,7 +307,6 @@ setMethod("predict", signature(object = "GBTRegressionModel"),
 
 #' @rdname spark.gbt
 #' @aliases predict,GBTClassificationModel-method
-#' @export
 #' @note predict(GBTClassificationModel) since 2.1.0
 setMethod("predict", signature(object = "GBTClassificationModel"),
           function(object, newData) {
@@ -334,7 +321,6 @@ setMethod("predict", signature(object = "GBTClassificationModel"),
 #'                  which means throw exception if the output path exists.
 #' @aliases write.ml,GBTRegressionModel,character-method
 #' @rdname spark.gbt
-#' @export
 #' @note write.ml(GBTRegressionModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "GBTRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -343,7 +329,6 @@ setMethod("write.ml", signature(object = "GBTRegressionModel", path = "character
 
 #' @aliases write.ml,GBTClassificationModel,character-method
 #' @rdname spark.gbt
-#' @export
 #' @note write.ml(GBTClassificationModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "GBTClassificationModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -402,7 +387,6 @@ setMethod("write.ml", signature(object = "GBTClassificationModel", path = "chara
 #' @return \code{spark.randomForest} returns a fitted Random Forest model.
 #' @rdname spark.randomForest
 #' @name spark.randomForest
-#' @export
 #' @examples
 #' \dontrun{
 #' # fit a Random Forest Regression Model
@@ -480,7 +464,6 @@ setMethod("spark.randomForest", signature(data = "SparkDataFrame", formula = "fo
 #'         \code{numTrees} (number of trees), and \code{treeWeights} (tree weights).
 #' @rdname spark.randomForest
 #' @aliases summary,RandomForestRegressionModel-method
-#' @export
 #' @note summary(RandomForestRegressionModel) since 2.1.0
 setMethod("summary", signature(object = "RandomForestRegressionModel"),
           function(object) {
@@ -494,7 +477,6 @@ setMethod("summary", signature(object = "RandomForestRegressionModel"),
 #' @param x summary object of Random Forest regression model or classification model
 #'          returned by \code{summary}.
 #' @rdname spark.randomForest
-#' @export
 #' @note print.summary.RandomForestRegressionModel since 2.1.0
 print.summary.RandomForestRegressionModel <- function(x, ...) {
   print.summary.treeEnsemble(x)
@@ -504,7 +486,6 @@ print.summary.RandomForestRegressionModel <- function(x, ...) {
 
 #' @rdname spark.randomForest
 #' @aliases summary,RandomForestClassificationModel-method
-#' @export
 #' @note summary(RandomForestClassificationModel) since 2.1.0
 setMethod("summary", signature(object = "RandomForestClassificationModel"),
           function(object) {
@@ -516,7 +497,6 @@ setMethod("summary", signature(object = "RandomForestClassificationModel"),
 #  Prints the summary of Random Forest Classification Model
 
 #' @rdname spark.randomForest
-#' @export
 #' @note print.summary.RandomForestClassificationModel since 2.1.0
 print.summary.RandomForestClassificationModel <- function(x, ...) {
   print.summary.treeEnsemble(x)
@@ -529,7 +509,6 @@ print.summary.RandomForestClassificationModel <- function(x, ...) {
 #'         "prediction".
 #' @rdname spark.randomForest
 #' @aliases predict,RandomForestRegressionModel-method
-#' @export
 #' @note predict(RandomForestRegressionModel) since 2.1.0
 setMethod("predict", signature(object = "RandomForestRegressionModel"),
           function(object, newData) {
@@ -538,7 +517,6 @@ setMethod("predict", signature(object = "RandomForestRegressionModel"),
 
 #' @rdname spark.randomForest
 #' @aliases predict,RandomForestClassificationModel-method
-#' @export
 #' @note predict(RandomForestClassificationModel) since 2.1.0
 setMethod("predict", signature(object = "RandomForestClassificationModel"),
           function(object, newData) {
@@ -554,7 +532,6 @@ setMethod("predict", signature(object = "RandomForestClassificationModel"),
 #'
 #' @aliases write.ml,RandomForestRegressionModel,character-method
 #' @rdname spark.randomForest
-#' @export
 #' @note write.ml(RandomForestRegressionModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "RandomForestRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -563,7 +540,6 @@ setMethod("write.ml", signature(object = "RandomForestRegressionModel", path = "
 
 #' @aliases write.ml,RandomForestClassificationModel,character-method
 #' @rdname spark.randomForest
-#' @export
 #' @note write.ml(RandomForestClassificationModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "RandomForestClassificationModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -617,7 +593,6 @@ setMethod("write.ml", signature(object = "RandomForestClassificationModel", path
 #' @return \code{spark.decisionTree} returns a fitted Decision Tree model.
 #' @rdname spark.decisionTree
 #' @name spark.decisionTree
-#' @export
 #' @examples
 #' \dontrun{
 #' # fit a Decision Tree Regression Model
@@ -690,7 +665,6 @@ setMethod("spark.decisionTree", signature(data = "SparkDataFrame", formula = "fo
 #'         trees).
 #' @rdname spark.decisionTree
 #' @aliases summary,DecisionTreeRegressionModel-method
-#' @export
 #' @note summary(DecisionTreeRegressionModel) since 2.3.0
 setMethod("summary", signature(object = "DecisionTreeRegressionModel"),
           function(object) {
@@ -704,7 +678,6 @@ setMethod("summary", signature(object = "DecisionTreeRegressionModel"),
 #' @param x summary object of Decision Tree regression model or classification model
 #'          returned by \code{summary}.
 #' @rdname spark.decisionTree
-#' @export
 #' @note print.summary.DecisionTreeRegressionModel since 2.3.0
 print.summary.DecisionTreeRegressionModel <- function(x, ...) {
   print.summary.decisionTree(x)
@@ -714,7 +687,6 @@ print.summary.DecisionTreeRegressionModel <- function(x, ...) {
 
 #' @rdname spark.decisionTree
 #' @aliases summary,DecisionTreeClassificationModel-method
-#' @export
 #' @note summary(DecisionTreeClassificationModel) since 2.3.0
 setMethod("summary", signature(object = "DecisionTreeClassificationModel"),
           function(object) {
@@ -726,7 +698,6 @@ setMethod("summary", signature(object = "DecisionTreeClassificationModel"),
 #  Prints the summary of Decision Tree Classification Model
 
 #' @rdname spark.decisionTree
-#' @export
 #' @note print.summary.DecisionTreeClassificationModel since 2.3.0
 print.summary.DecisionTreeClassificationModel <- function(x, ...) {
   print.summary.decisionTree(x)
@@ -739,7 +710,6 @@ print.summary.DecisionTreeClassificationModel <- function(x, ...) {
 #'         "prediction".
 #' @rdname spark.decisionTree
 #' @aliases predict,DecisionTreeRegressionModel-method
-#' @export
 #' @note predict(DecisionTreeRegressionModel) since 2.3.0
 setMethod("predict", signature(object = "DecisionTreeRegressionModel"),
           function(object, newData) {
@@ -748,7 +718,6 @@ setMethod("predict", signature(object = "DecisionTreeRegressionModel"),
 
 #' @rdname spark.decisionTree
 #' @aliases predict,DecisionTreeClassificationModel-method
-#' @export
 #' @note predict(DecisionTreeClassificationModel) since 2.3.0
 setMethod("predict", signature(object = "DecisionTreeClassificationModel"),
           function(object, newData) {
@@ -764,7 +733,6 @@ setMethod("predict", signature(object = "DecisionTreeClassificationModel"),
 #'
 #' @aliases write.ml,DecisionTreeRegressionModel,character-method
 #' @rdname spark.decisionTree
-#' @export
 #' @note write.ml(DecisionTreeRegressionModel, character) since 2.3.0
 setMethod("write.ml", signature(object = "DecisionTreeRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -773,7 +741,6 @@ setMethod("write.ml", signature(object = "DecisionTreeRegressionModel", path = "
 
 #' @aliases write.ml,DecisionTreeClassificationModel,character-method
 #' @rdname spark.decisionTree
-#' @export
 #' @note write.ml(DecisionTreeClassificationModel, character) since 2.3.0
 setMethod("write.ml", signature(object = "DecisionTreeClassificationModel", path = "character"),
           function(object, path, overwrite = FALSE) {

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_utils.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_utils.R b/R/pkg/R/mllib_utils.R
index a53c92c..7d04bff 100644
--- a/R/pkg/R/mllib_utils.R
+++ b/R/pkg/R/mllib_utils.R
@@ -31,7 +31,6 @@
 #' MLlib model below.
 #' @rdname write.ml
 #' @name write.ml
-#' @export
 #' @seealso \link{spark.als}, \link{spark.bisectingKmeans}, \link{spark.decisionTree},
 #' @seealso \link{spark.gaussianMixture}, \link{spark.gbt},
 #' @seealso \link{spark.glm}, \link{glm}, \link{spark.isoreg},
@@ -48,7 +47,6 @@ NULL
 #' MLlib model below.
 #' @rdname predict
 #' @name predict
-#' @export
 #' @seealso \link{spark.als}, \link{spark.bisectingKmeans}, \link{spark.decisionTree},
 #' @seealso \link{spark.gaussianMixture}, \link{spark.gbt},
 #' @seealso \link{spark.glm}, \link{glm}, \link{spark.isoreg},
@@ -75,7 +73,6 @@ predict_internal <- function(object, newData) {
 #' @return A fitted MLlib model.
 #' @rdname read.ml
 #' @name read.ml
-#' @export
 #' @seealso \link{write.ml}
 #' @examples
 #' \dontrun{

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/schema.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/schema.R b/R/pkg/R/schema.R
index 65f4187..9831fc3 100644
--- a/R/pkg/R/schema.R
+++ b/R/pkg/R/schema.R
@@ -29,7 +29,6 @@
 #' @param ... additional structField objects
 #' @return a structType object
 #' @rdname structType
-#' @export
 #' @examples
 #'\dontrun{
 #' schema <- structType(structField("a", "integer"), structField("c", "string"),
@@ -49,7 +48,6 @@ structType <- function(x, ...) {
 
 #' @rdname structType
 #' @method structType jobj
-#' @export
 structType.jobj <- function(x, ...) {
   obj <- structure(list(), class = "structType")
   obj$jobj <- x
@@ -59,7 +57,6 @@ structType.jobj <- function(x, ...) {
 
 #' @rdname structType
 #' @method structType structField
-#' @export
 structType.structField <- function(x, ...) {
   fields <- list(x, ...)
   if (!all(sapply(fields, inherits, "structField"))) {
@@ -76,7 +73,6 @@ structType.structField <- function(x, ...) {
 
 #' @rdname structType
 #' @method structType character
-#' @export
 structType.character <- function(x, ...) {
   if (!is.character(x)) {
     stop("schema must be a DDL-formatted string.")
@@ -119,7 +115,6 @@ print.structType <- function(x, ...) {
 #' @param ... additional argument(s) passed to the method.
 #' @return A structField object.
 #' @rdname structField
-#' @export
 #' @examples
 #'\dontrun{
 #' field1 <- structField("a", "integer")
@@ -137,7 +132,6 @@ structField <- function(x, ...) {
 
 #' @rdname structField
 #' @method structField jobj
-#' @export
 structField.jobj <- function(x, ...) {
   obj <- structure(list(), class = "structField")
   obj$jobj <- x
@@ -212,7 +206,6 @@ checkType <- function(type) {
 #' @param type The data type of the field
 #' @param nullable A logical vector indicating whether or not the field is nullable
 #' @rdname structField
-#' @export
 structField.character <- function(x, type, nullable = TRUE, ...) {
   if (class(x) != "character") {
     stop("Field name must be a string.")

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/sparkR.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 965471f..a480ac6 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -35,7 +35,6 @@ connExists <- function(env) {
 #' Also terminates the backend this R session is connected to.
 #' @rdname sparkR.session.stop
 #' @name sparkR.session.stop
-#' @export
 #' @note sparkR.session.stop since 2.0.0
 sparkR.session.stop <- function() {
   env <- .sparkREnv
@@ -84,7 +83,6 @@ sparkR.session.stop <- function() {
 
 #' @rdname sparkR.session.stop
 #' @name sparkR.stop
-#' @export
 #' @note sparkR.stop since 1.4.0
 sparkR.stop <- function() {
   sparkR.session.stop()
@@ -103,7 +101,6 @@ sparkR.stop <- function() {
 #' @param sparkPackages Character vector of package coordinates
 #' @seealso \link{sparkR.session}
 #' @rdname sparkR.init-deprecated
-#' @export
 #' @examples
 #'\dontrun{
 #' sc <- sparkR.init("local[2]", "SparkR", "/home/spark")
@@ -270,7 +267,6 @@ sparkR.sparkContext <- function(
 #' @param jsc The existing JavaSparkContext created with SparkR.init()
 #' @seealso \link{sparkR.session}
 #' @rdname sparkRSQL.init-deprecated
-#' @export
 #' @examples
 #'\dontrun{
 #' sc <- sparkR.init()
@@ -298,7 +294,6 @@ sparkRSQL.init <- function(jsc = NULL) {
 #' @param jsc The existing JavaSparkContext created with SparkR.init()
 #' @seealso \link{sparkR.session}
 #' @rdname sparkRHive.init-deprecated
-#' @export
 #' @examples
 #'\dontrun{
 #' sc <- sparkR.init()
@@ -347,7 +342,6 @@ sparkRHive.init <- function(jsc = NULL) {
 #' @param enableHiveSupport enable support for Hive, fallback if not built with Hive support; once
 #'        set, this cannot be turned off on an existing session
 #' @param ... named Spark properties passed to the method.
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -442,7 +436,6 @@ sparkR.session <- function(
 #' @return the SparkUI URL, or NA if it is disabled, or not started.
 #' @rdname sparkR.uiWebUrl
 #' @name sparkR.uiWebUrl
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/stats.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/stats.R b/R/pkg/R/stats.R
index c8af798..497f18c 100644
--- a/R/pkg/R/stats.R
+++ b/R/pkg/R/stats.R
@@ -37,7 +37,6 @@ setOldClass("jobj")
 #' @name crosstab
 #' @aliases crosstab,SparkDataFrame,character,character-method
 #' @family stat functions
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- read.json("/path/to/file.json")
@@ -63,7 +62,6 @@ setMethod("crosstab",
 #' @rdname cov
 #' @aliases cov,SparkDataFrame-method
 #' @family stat functions
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -92,7 +90,6 @@ setMethod("cov",
 #' @name corr
 #' @aliases corr,SparkDataFrame-method
 #' @family stat functions
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -124,7 +121,6 @@ setMethod("corr",
 #' @name freqItems
 #' @aliases freqItems,SparkDataFrame,character-method
 #' @family stat functions
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- read.json("/path/to/file.json")
@@ -168,7 +164,6 @@ setMethod("freqItems", signature(x = "SparkDataFrame", cols = "character"),
 #' @name approxQuantile
 #' @aliases approxQuantile,SparkDataFrame,character,numeric,numeric-method
 #' @family stat functions
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- read.json("/path/to/file.json")
@@ -205,7 +200,6 @@ setMethod("approxQuantile",
 #' @aliases sampleBy,SparkDataFrame,character,list,numeric-method
 #' @name sampleBy
 #' @family stat functions
-#' @export
 #' @examples
 #'\dontrun{
 #' df <- read.json("/path/to/file.json")

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/streaming.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/streaming.R b/R/pkg/R/streaming.R
index 8390bd5..fc83463 100644
--- a/R/pkg/R/streaming.R
+++ b/R/pkg/R/streaming.R
@@ -28,7 +28,6 @@ NULL
 #' @seealso \link{read.stream}
 #'
 #' @param ssq A Java object reference to the backing Scala StreamingQuery
-#' @export
 #' @note StreamingQuery since 2.2.0
 #' @note experimental
 setClass("StreamingQuery",
@@ -45,7 +44,6 @@ streamingQuery <- function(ssq) {
 }
 
 #' @rdname show
-#' @export
 #' @note show(StreamingQuery) since 2.2.0
 setMethod("show", "StreamingQuery",
           function(object) {
@@ -70,7 +68,6 @@ setMethod("show", "StreamingQuery",
 #' @aliases queryName,StreamingQuery-method
 #' @family StreamingQuery methods
 #' @seealso \link{write.stream}
-#' @export
 #' @examples
 #' \dontrun{ queryName(sq) }
 #' @note queryName(StreamingQuery) since 2.2.0
@@ -85,7 +82,6 @@ setMethod("queryName",
 #' @name explain
 #' @aliases explain,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ explain(sq) }
 #' @note explain(StreamingQuery) since 2.2.0
@@ -104,7 +100,6 @@ setMethod("explain",
 #' @name lastProgress
 #' @aliases lastProgress,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ lastProgress(sq) }
 #' @note lastProgress(StreamingQuery) since 2.2.0
@@ -129,7 +124,6 @@ setMethod("lastProgress",
 #' @name status
 #' @aliases status,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ status(sq) }
 #' @note status(StreamingQuery) since 2.2.0
@@ -150,7 +144,6 @@ setMethod("status",
 #' @name isActive
 #' @aliases isActive,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ isActive(sq) }
 #' @note isActive(StreamingQuery) since 2.2.0
@@ -177,7 +170,6 @@ setMethod("isActive",
 #' @name awaitTermination
 #' @aliases awaitTermination,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ awaitTermination(sq, 10000) }
 #' @note awaitTermination(StreamingQuery) since 2.2.0
@@ -202,7 +194,6 @@ setMethod("awaitTermination",
 #' @name stopQuery
 #' @aliases stopQuery,StreamingQuery-method
 #' @family StreamingQuery methods
-#' @export
 #' @examples
 #' \dontrun{ stopQuery(sq) }
 #' @note stopQuery(StreamingQuery) since 2.2.0

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/utils.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/utils.R b/R/pkg/R/utils.R
index 164cd6d..f1b5eca 100644
--- a/R/pkg/R/utils.R
+++ b/R/pkg/R/utils.R
@@ -108,7 +108,6 @@ isRDD <- function(name, env) {
 #'
 #' @param key the object to be hashed
 #' @return the hash code as an integer
-#' @export
 #' @examples
 #'\dontrun{
 #' hashCode(1L) # 1

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/window.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/window.R b/R/pkg/R/window.R
index 0799d84..396b27b 100644
--- a/R/pkg/R/window.R
+++ b/R/pkg/R/window.R
@@ -29,7 +29,6 @@
 #' @rdname windowPartitionBy
 #' @name windowPartitionBy
 #' @aliases windowPartitionBy,character-method
-#' @export
 #' @examples
 #' \dontrun{
 #'   ws <- orderBy(windowPartitionBy("key1", "key2"), "key3")
@@ -52,7 +51,6 @@ setMethod("windowPartitionBy",
 #' @rdname windowPartitionBy
 #' @name windowPartitionBy
 #' @aliases windowPartitionBy,Column-method
-#' @export
 #' @note windowPartitionBy(Column) since 2.0.0
 setMethod("windowPartitionBy",
           signature(col = "Column"),
@@ -78,7 +76,6 @@ setMethod("windowPartitionBy",
 #' @rdname windowOrderBy
 #' @name windowOrderBy
 #' @aliases windowOrderBy,character-method
-#' @export
 #' @examples
 #' \dontrun{
 #'   ws <- windowOrderBy("key1", "key2")
@@ -101,7 +98,6 @@ setMethod("windowOrderBy",
 #' @rdname windowOrderBy
 #' @name windowOrderBy
 #' @aliases windowOrderBy,Column-method
-#' @export
 #' @note windowOrderBy(Column) since 2.0.0
 setMethod("windowOrderBy",
           signature(col = "Column"),




[2/3] spark git commit: [SPARK-22430][R][DOCS] Unknown tag warnings when building R docs with Roxygen 6.0.1

Posted by fe...@apache.org.
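
The same mechanical removal continues in R/pkg/R/generics.R. It covers both the
plain "# @export" comment lines on the internal RDD generics (single-# comments,
which roxygen2 does not read in any case) and the "#' @export" roxygen tags on the
public DataFrame, column and window generics; the setGeneric() calls and the
remaining @rdname/@name/@seealso tags are left untouched. For example, from the
first hunk of this file:

  # Before
  # @rdname aggregateRDD
  # @seealso reduce
  # @export
  setGeneric("aggregateRDD",
             function(x, zeroValue, seqOp, combOp) { standardGeneric("aggregateRDD") })

  # After
  # @rdname aggregateRDD
  # @seealso reduce
  setGeneric("aggregateRDD",
             function(x, zeroValue, seqOp, combOp) { standardGeneric("aggregateRDD") })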
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/generics.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R
index e0dde33..6fba4b6 100644
--- a/R/pkg/R/generics.R
+++ b/R/pkg/R/generics.R
@@ -19,7 +19,6 @@
 
 # @rdname aggregateRDD
 # @seealso reduce
-# @export
 setGeneric("aggregateRDD",
            function(x, zeroValue, seqOp, combOp) { standardGeneric("aggregateRDD") })
 
@@ -27,21 +26,17 @@ setGeneric("cacheRDD", function(x) { standardGeneric("cacheRDD") })
 
 # @rdname coalesce
 # @seealso repartition
-# @export
 setGeneric("coalesceRDD", function(x, numPartitions, ...) { standardGeneric("coalesceRDD") })
 
 # @rdname checkpoint-methods
-# @export
 setGeneric("checkpointRDD", function(x) { standardGeneric("checkpointRDD") })
 
 setGeneric("collectRDD", function(x, ...) { standardGeneric("collectRDD") })
 
 # @rdname collect-methods
-# @export
 setGeneric("collectAsMap", function(x) { standardGeneric("collectAsMap") })
 
 # @rdname collect-methods
-# @export
 setGeneric("collectPartition",
            function(x, partitionId) {
              standardGeneric("collectPartition")
@@ -52,19 +47,15 @@ setGeneric("countRDD", function(x) { standardGeneric("countRDD") })
 setGeneric("lengthRDD", function(x) { standardGeneric("lengthRDD") })
 
 # @rdname countByValue
-# @export
 setGeneric("countByValue", function(x) { standardGeneric("countByValue") })
 
 # @rdname crosstab
-# @export
 setGeneric("crosstab", function(x, col1, col2) { standardGeneric("crosstab") })
 
 # @rdname freqItems
-# @export
 setGeneric("freqItems", function(x, cols, support = 0.01) { standardGeneric("freqItems") })
 
 # @rdname approxQuantile
-# @export
 setGeneric("approxQuantile",
            function(x, cols, probabilities, relativeError) {
              standardGeneric("approxQuantile")
@@ -73,18 +64,15 @@ setGeneric("approxQuantile",
 setGeneric("distinctRDD", function(x, numPartitions = 1) { standardGeneric("distinctRDD") })
 
 # @rdname filterRDD
-# @export
 setGeneric("filterRDD", function(x, f) { standardGeneric("filterRDD") })
 
 setGeneric("firstRDD", function(x, ...) { standardGeneric("firstRDD") })
 
 # @rdname flatMap
-# @export
 setGeneric("flatMap", function(X, FUN) { standardGeneric("flatMap") })
 
 # @rdname fold
 # @seealso reduce
-# @export
 setGeneric("fold", function(x, zeroValue, op) { standardGeneric("fold") })
 
 setGeneric("foreach", function(x, func) { standardGeneric("foreach") })
@@ -95,17 +83,14 @@ setGeneric("foreachPartition", function(x, func) { standardGeneric("foreachParti
 setGeneric("getJRDD", function(rdd, ...) { standardGeneric("getJRDD") })
 
 # @rdname glom
-# @export
 setGeneric("glom", function(x) { standardGeneric("glom") })
 
 # @rdname histogram
-# @export
 setGeneric("histogram", function(df, col, nbins=10) { standardGeneric("histogram") })
 
 setGeneric("joinRDD", function(x, y, ...) { standardGeneric("joinRDD") })
 
 # @rdname keyBy
-# @export
 setGeneric("keyBy", function(x, func) { standardGeneric("keyBy") })
 
 setGeneric("lapplyPartition", function(X, FUN) { standardGeneric("lapplyPartition") })
@@ -123,47 +108,37 @@ setGeneric("mapPartitionsWithIndex",
            function(X, FUN) { standardGeneric("mapPartitionsWithIndex") })
 
 # @rdname maximum
-# @export
 setGeneric("maximum", function(x) { standardGeneric("maximum") })
 
 # @rdname minimum
-# @export
 setGeneric("minimum", function(x) { standardGeneric("minimum") })
 
 # @rdname sumRDD
-# @export
 setGeneric("sumRDD", function(x) { standardGeneric("sumRDD") })
 
 # @rdname name
-# @export
 setGeneric("name", function(x) { standardGeneric("name") })
 
 # @rdname getNumPartitionsRDD
-# @export
 setGeneric("getNumPartitionsRDD", function(x) { standardGeneric("getNumPartitionsRDD") })
 
 # @rdname getNumPartitions
-# @export
 setGeneric("numPartitions", function(x) { standardGeneric("numPartitions") })
 
 setGeneric("persistRDD", function(x, newLevel) { standardGeneric("persistRDD") })
 
 # @rdname pipeRDD
-# @export
 setGeneric("pipeRDD", function(x, command, env = list()) { standardGeneric("pipeRDD")})
 
 # @rdname pivot
-# @export
 setGeneric("pivot", function(x, colname, values = list()) { standardGeneric("pivot") })
 
 # @rdname reduce
-# @export
 setGeneric("reduce", function(x, func) { standardGeneric("reduce") })
 
 setGeneric("repartitionRDD", function(x, ...) { standardGeneric("repartitionRDD") })
 
 # @rdname sampleRDD
-# @export
 setGeneric("sampleRDD",
            function(x, withReplacement, fraction, seed) {
              standardGeneric("sampleRDD")
@@ -171,21 +146,17 @@ setGeneric("sampleRDD",
 
 # @rdname saveAsObjectFile
 # @seealso objectFile
-# @export
 setGeneric("saveAsObjectFile", function(x, path) { standardGeneric("saveAsObjectFile") })
 
 # @rdname saveAsTextFile
-# @export
 setGeneric("saveAsTextFile", function(x, path) { standardGeneric("saveAsTextFile") })
 
 # @rdname setName
-# @export
 setGeneric("setName", function(x, name) { standardGeneric("setName") })
 
 setGeneric("showRDD", function(object, ...) { standardGeneric("showRDD") })
 
 # @rdname sortBy
-# @export
 setGeneric("sortBy",
            function(x, func, ascending = TRUE, numPartitions = 1) {
              standardGeneric("sortBy")
@@ -194,88 +165,71 @@ setGeneric("sortBy",
 setGeneric("takeRDD", function(x, num) { standardGeneric("takeRDD") })
 
 # @rdname takeOrdered
-# @export
 setGeneric("takeOrdered", function(x, num) { standardGeneric("takeOrdered") })
 
 # @rdname takeSample
-# @export
 setGeneric("takeSample",
            function(x, withReplacement, num, seed) {
              standardGeneric("takeSample")
            })
 
 # @rdname top
-# @export
 setGeneric("top", function(x, num) { standardGeneric("top") })
 
 # @rdname unionRDD
-# @export
 setGeneric("unionRDD", function(x, y) { standardGeneric("unionRDD") })
 
 setGeneric("unpersistRDD", function(x, ...) { standardGeneric("unpersistRDD") })
 
 # @rdname zipRDD
-# @export
 setGeneric("zipRDD", function(x, other) { standardGeneric("zipRDD") })
 
 # @rdname zipRDD
-# @export
 setGeneric("zipPartitions", function(..., func) { standardGeneric("zipPartitions") },
            signature = "...")
 
 # @rdname zipWithIndex
 # @seealso zipWithUniqueId
-# @export
 setGeneric("zipWithIndex", function(x) { standardGeneric("zipWithIndex") })
 
 # @rdname zipWithUniqueId
 # @seealso zipWithIndex
-# @export
 setGeneric("zipWithUniqueId", function(x) { standardGeneric("zipWithUniqueId") })
 
 
 ############ Binary Functions #############
 
 # @rdname cartesian
-# @export
 setGeneric("cartesian", function(x, other) { standardGeneric("cartesian") })
 
 # @rdname countByKey
-# @export
 setGeneric("countByKey", function(x) { standardGeneric("countByKey") })
 
 # @rdname flatMapValues
-# @export
 setGeneric("flatMapValues", function(X, FUN) { standardGeneric("flatMapValues") })
 
 # @rdname intersection
-# @export
 setGeneric("intersection",
            function(x, other, numPartitions = 1) {
              standardGeneric("intersection")
            })
 
 # @rdname keys
-# @export
 setGeneric("keys", function(x) { standardGeneric("keys") })
 
 # @rdname lookup
-# @export
 setGeneric("lookup", function(x, key) { standardGeneric("lookup") })
 
 # @rdname mapValues
-# @export
 setGeneric("mapValues", function(X, FUN) { standardGeneric("mapValues") })
 
 # @rdname sampleByKey
-# @export
 setGeneric("sampleByKey",
            function(x, withReplacement, fractions, seed) {
              standardGeneric("sampleByKey")
            })
 
 # @rdname values
-# @export
 setGeneric("values", function(x) { standardGeneric("values") })
 
 
@@ -283,14 +237,12 @@ setGeneric("values", function(x) { standardGeneric("values") })
 
 # @rdname aggregateByKey
 # @seealso foldByKey, combineByKey
-# @export
 setGeneric("aggregateByKey",
            function(x, zeroValue, seqOp, combOp, numPartitions) {
              standardGeneric("aggregateByKey")
            })
 
 # @rdname cogroup
-# @export
 setGeneric("cogroup",
            function(..., numPartitions) {
              standardGeneric("cogroup")
@@ -299,7 +251,6 @@ setGeneric("cogroup",
 
 # @rdname combineByKey
 # @seealso groupByKey, reduceByKey
-# @export
 setGeneric("combineByKey",
            function(x, createCombiner, mergeValue, mergeCombiners, numPartitions) {
              standardGeneric("combineByKey")
@@ -307,64 +258,53 @@ setGeneric("combineByKey",
 
 # @rdname foldByKey
 # @seealso aggregateByKey, combineByKey
-# @export
 setGeneric("foldByKey",
            function(x, zeroValue, func, numPartitions) {
              standardGeneric("foldByKey")
            })
 
 # @rdname join-methods
-# @export
 setGeneric("fullOuterJoin", function(x, y, numPartitions) { standardGeneric("fullOuterJoin") })
 
 # @rdname groupByKey
 # @seealso reduceByKey
-# @export
 setGeneric("groupByKey", function(x, numPartitions) { standardGeneric("groupByKey") })
 
 # @rdname join-methods
-# @export
 setGeneric("join", function(x, y, ...) { standardGeneric("join") })
 
 # @rdname join-methods
-# @export
 setGeneric("leftOuterJoin", function(x, y, numPartitions) { standardGeneric("leftOuterJoin") })
 
 setGeneric("partitionByRDD", function(x, ...) { standardGeneric("partitionByRDD") })
 
 # @rdname reduceByKey
 # @seealso groupByKey
-# @export
 setGeneric("reduceByKey", function(x, combineFunc, numPartitions) { standardGeneric("reduceByKey")})
 
 # @rdname reduceByKeyLocally
 # @seealso reduceByKey
-# @export
 setGeneric("reduceByKeyLocally",
            function(x, combineFunc) {
              standardGeneric("reduceByKeyLocally")
            })
 
 # @rdname join-methods
-# @export
 setGeneric("rightOuterJoin", function(x, y, numPartitions) { standardGeneric("rightOuterJoin") })
 
 # @rdname sortByKey
-# @export
 setGeneric("sortByKey",
            function(x, ascending = TRUE, numPartitions = 1) {
              standardGeneric("sortByKey")
            })
 
 # @rdname subtract
-# @export
 setGeneric("subtract",
            function(x, other, numPartitions = 1) {
              standardGeneric("subtract")
            })
 
 # @rdname subtractByKey
-# @export
 setGeneric("subtractByKey",
            function(x, other, numPartitions = 1) {
              standardGeneric("subtractByKey")
@@ -374,7 +314,6 @@ setGeneric("subtractByKey",
 ################### Broadcast Variable Methods #################
 
 # @rdname broadcast
-# @export
 setGeneric("value", function(bcast) { standardGeneric("value") })
 
 
@@ -384,7 +323,6 @@ setGeneric("value", function(bcast) { standardGeneric("value") })
 #' @param ... further arguments to be passed to or from other methods.
 #' @return A SparkDataFrame.
 #' @rdname summarize
-#' @export
 setGeneric("agg", function(x, ...) { standardGeneric("agg") })
 
 #' alias
@@ -399,11 +337,9 @@ setGeneric("agg", function(x, ...) { standardGeneric("agg") })
 NULL
 
 #' @rdname arrange
-#' @export
 setGeneric("arrange", function(x, col, ...) { standardGeneric("arrange") })
 
 #' @rdname as.data.frame
-#' @export
 setGeneric("as.data.frame",
            function(x, row.names = NULL, optional = FALSE, ...) {
              standardGeneric("as.data.frame")
@@ -411,52 +347,41 @@ setGeneric("as.data.frame",
 
 # Do not document the generic because of signature changes across R versions
 #' @noRd
-#' @export
 setGeneric("attach")
 
 #' @rdname cache
-#' @export
 setGeneric("cache", function(x) { standardGeneric("cache") })
 
 #' @rdname checkpoint
-#' @export
 setGeneric("checkpoint", function(x, eager = TRUE) { standardGeneric("checkpoint") })
 
 #' @rdname coalesce
 #' @param x a SparkDataFrame.
 #' @param ... additional argument(s).
-#' @export
 setGeneric("coalesce", function(x, ...) { standardGeneric("coalesce") })
 
 #' @rdname collect
-#' @export
 setGeneric("collect", function(x, ...) { standardGeneric("collect") })
 
 #' @param do.NULL currently not used.
 #' @param prefix currently not used.
 #' @rdname columns
-#' @export
 setGeneric("colnames", function(x, do.NULL = TRUE, prefix = "col") { standardGeneric("colnames") })
 
 #' @rdname columns
-#' @export
 setGeneric("colnames<-", function(x, value) { standardGeneric("colnames<-") })
 
 #' @rdname coltypes
-#' @export
 setGeneric("coltypes", function(x) { standardGeneric("coltypes") })
 
 #' @rdname coltypes
-#' @export
 setGeneric("coltypes<-", function(x, value) { standardGeneric("coltypes<-") })
 
 #' @rdname columns
-#' @export
 setGeneric("columns", function(x) {standardGeneric("columns") })
 
 #' @param x a GroupedData or Column.
 #' @rdname count
-#' @export
 setGeneric("count", function(x) { standardGeneric("count") })
 
 #' @rdname cov
@@ -464,7 +389,6 @@ setGeneric("count", function(x) { standardGeneric("count") })
 #' @param ... additional argument(s). If \code{x} is a Column, a Column
 #'        should be provided. If \code{x} is a SparkDataFrame, two column names should
 #'        be provided.
-#' @export
 setGeneric("cov", function(x, ...) {standardGeneric("cov") })
 
 #' @rdname corr
@@ -472,294 +396,229 @@ setGeneric("cov", function(x, ...) {standardGeneric("cov") })
 #' @param ... additional argument(s). If \code{x} is a Column, a Column
 #'        should be provided. If \code{x} is a SparkDataFrame, two column names should
 #'        be provided.
-#' @export
 setGeneric("corr", function(x, ...) {standardGeneric("corr") })
 
 #' @rdname cov
-#' @export
 setGeneric("covar_samp", function(col1, col2) {standardGeneric("covar_samp") })
 
 #' @rdname cov
-#' @export
 setGeneric("covar_pop", function(col1, col2) {standardGeneric("covar_pop") })
 
 #' @rdname createOrReplaceTempView
-#' @export
 setGeneric("createOrReplaceTempView",
            function(x, viewName) {
              standardGeneric("createOrReplaceTempView")
            })
 
 # @rdname crossJoin
-# @export
 setGeneric("crossJoin", function(x, y) { standardGeneric("crossJoin") })
 
 #' @rdname cube
-#' @export
 setGeneric("cube", function(x, ...) { standardGeneric("cube") })
 
 #' @rdname dapply
-#' @export
 setGeneric("dapply", function(x, func, schema) { standardGeneric("dapply") })
 
 #' @rdname dapplyCollect
-#' @export
 setGeneric("dapplyCollect", function(x, func) { standardGeneric("dapplyCollect") })
 
 #' @param x a SparkDataFrame or GroupedData.
 #' @param ... additional argument(s) passed to the method.
 #' @rdname gapply
-#' @export
 setGeneric("gapply", function(x, ...) { standardGeneric("gapply") })
 
 #' @param x a SparkDataFrame or GroupedData.
 #' @param ... additional argument(s) passed to the method.
 #' @rdname gapplyCollect
-#' @export
 setGeneric("gapplyCollect", function(x, ...) { standardGeneric("gapplyCollect") })
 
 # @rdname getNumPartitions
-# @export
 setGeneric("getNumPartitions", function(x) { standardGeneric("getNumPartitions") })
 
 #' @rdname describe
-#' @export
 setGeneric("describe", function(x, col, ...) { standardGeneric("describe") })
 
 #' @rdname distinct
-#' @export
 setGeneric("distinct", function(x) { standardGeneric("distinct") })
 
 #' @rdname drop
-#' @export
 setGeneric("drop", function(x, ...) { standardGeneric("drop") })
 
 #' @rdname dropDuplicates
-#' @export
 setGeneric("dropDuplicates", function(x, ...) { standardGeneric("dropDuplicates") })
 
 #' @rdname nafunctions
-#' @export
 setGeneric("dropna",
            function(x, how = c("any", "all"), minNonNulls = NULL, cols = NULL) {
              standardGeneric("dropna")
            })
 
 #' @rdname nafunctions
-#' @export
 setGeneric("na.omit",
            function(object, ...) {
              standardGeneric("na.omit")
            })
 
 #' @rdname dtypes
-#' @export
 setGeneric("dtypes", function(x) { standardGeneric("dtypes") })
 
 #' @rdname explain
-#' @export
 #' @param x a SparkDataFrame or a StreamingQuery.
 #' @param extended Logical. If extended is FALSE, prints only the physical plan.
 #' @param ... further arguments to be passed to or from other methods.
 setGeneric("explain", function(x, ...) { standardGeneric("explain") })
 
 #' @rdname except
-#' @export
 setGeneric("except", function(x, y) { standardGeneric("except") })
 
 #' @rdname nafunctions
-#' @export
 setGeneric("fillna", function(x, value, cols = NULL) { standardGeneric("fillna") })
 
 #' @rdname filter
-#' @export
 setGeneric("filter", function(x, condition) { standardGeneric("filter") })
 
 #' @rdname first
-#' @export
 setGeneric("first", function(x, ...) { standardGeneric("first") })
 
 #' @rdname groupBy
-#' @export
 setGeneric("group_by", function(x, ...) { standardGeneric("group_by") })
 
 #' @rdname groupBy
-#' @export
 setGeneric("groupBy", function(x, ...) { standardGeneric("groupBy") })
 
 #' @rdname hint
-#' @export
 setGeneric("hint", function(x, name, ...) { standardGeneric("hint") })
 
 #' @rdname insertInto
-#' @export
 setGeneric("insertInto", function(x, tableName, ...) { standardGeneric("insertInto") })
 
 #' @rdname intersect
-#' @export
 setGeneric("intersect", function(x, y) { standardGeneric("intersect") })
 
 #' @rdname isLocal
-#' @export
 setGeneric("isLocal", function(x) { standardGeneric("isLocal") })
 
 #' @rdname isStreaming
-#' @export
 setGeneric("isStreaming", function(x) { standardGeneric("isStreaming") })
 
 #' @rdname limit
-#' @export
 setGeneric("limit", function(x, num) {standardGeneric("limit") })
 
 #' @rdname localCheckpoint
-#' @export
 setGeneric("localCheckpoint", function(x, eager = TRUE) { standardGeneric("localCheckpoint") })
 
 #' @rdname merge
-#' @export
 setGeneric("merge")
 
 #' @rdname mutate
-#' @export
 setGeneric("mutate", function(.data, ...) {standardGeneric("mutate") })
 
 #' @rdname orderBy
-#' @export
 setGeneric("orderBy", function(x, col, ...) { standardGeneric("orderBy") })
 
 #' @rdname persist
-#' @export
 setGeneric("persist", function(x, newLevel) { standardGeneric("persist") })
 
 #' @rdname printSchema
-#' @export
 setGeneric("printSchema", function(x) { standardGeneric("printSchema") })
 
 #' @rdname registerTempTable-deprecated
-#' @export
 setGeneric("registerTempTable", function(x, tableName) { standardGeneric("registerTempTable") })
 
 #' @rdname rename
-#' @export
 setGeneric("rename", function(x, ...) { standardGeneric("rename") })
 
 #' @rdname repartition
-#' @export
 setGeneric("repartition", function(x, ...) { standardGeneric("repartition") })
 
 #' @rdname sample
-#' @export
 setGeneric("sample",
            function(x, withReplacement = FALSE, fraction, seed) {
              standardGeneric("sample")
            })
 
 #' @rdname rollup
-#' @export
 setGeneric("rollup", function(x, ...) { standardGeneric("rollup") })
 
 #' @rdname sample
-#' @export
 setGeneric("sample_frac",
            function(x, withReplacement = FALSE, fraction, seed) { standardGeneric("sample_frac") })
 
 #' @rdname sampleBy
-#' @export
 setGeneric("sampleBy", function(x, col, fractions, seed) { standardGeneric("sampleBy") })
 
 #' @rdname saveAsTable
-#' @export
 setGeneric("saveAsTable", function(df, tableName, source = NULL, mode = "error", ...) {
   standardGeneric("saveAsTable")
 })
 
-#' @export
 setGeneric("str")
 
 #' @rdname take
-#' @export
 setGeneric("take", function(x, num) { standardGeneric("take") })
 
 #' @rdname mutate
-#' @export
 setGeneric("transform", function(`_data`, ...) {standardGeneric("transform") })
 
 #' @rdname write.df
-#' @export
 setGeneric("write.df", function(df, path = NULL, source = NULL, mode = "error", ...) {
   standardGeneric("write.df")
 })
 
 #' @rdname write.df
-#' @export
 setGeneric("saveDF", function(df, path, source = NULL, mode = "error", ...) {
   standardGeneric("saveDF")
 })
 
 #' @rdname write.jdbc
-#' @export
 setGeneric("write.jdbc", function(x, url, tableName, mode = "error", ...) {
   standardGeneric("write.jdbc")
 })
 
 #' @rdname write.json
-#' @export
 setGeneric("write.json", function(x, path, ...) { standardGeneric("write.json") })
 
 #' @rdname write.orc
-#' @export
 setGeneric("write.orc", function(x, path, ...) { standardGeneric("write.orc") })
 
 #' @rdname write.parquet
-#' @export
 setGeneric("write.parquet", function(x, path, ...) {
   standardGeneric("write.parquet")
 })
 
 #' @rdname write.parquet
-#' @export
 setGeneric("saveAsParquetFile", function(x, path) { standardGeneric("saveAsParquetFile") })
 
 #' @rdname write.stream
-#' @export
 setGeneric("write.stream", function(df, source = NULL, outputMode = NULL, ...) {
   standardGeneric("write.stream")
 })
 
 #' @rdname write.text
-#' @export
 setGeneric("write.text", function(x, path, ...) { standardGeneric("write.text") })
 
 #' @rdname schema
-#' @export
 setGeneric("schema", function(x) { standardGeneric("schema") })
 
 #' @rdname select
-#' @export
 setGeneric("select", function(x, col, ...) { standardGeneric("select") })
 
 #' @rdname selectExpr
-#' @export
 setGeneric("selectExpr", function(x, expr, ...) { standardGeneric("selectExpr") })
 
 #' @rdname showDF
-#' @export
 setGeneric("showDF", function(x, ...) { standardGeneric("showDF") })
 
 # @rdname storageLevel
-# @export
 setGeneric("storageLevel", function(x) { standardGeneric("storageLevel") })
 
 #' @rdname subset
-#' @export
 setGeneric("subset", function(x, ...) { standardGeneric("subset") })
 
 #' @rdname summarize
-#' @export
 setGeneric("summarize", function(x, ...) { standardGeneric("summarize") })
 
 #' @rdname summary
-#' @export
 setGeneric("summary", function(object, ...) { standardGeneric("summary") })
 
 setGeneric("toJSON", function(x) { standardGeneric("toJSON") })
@@ -767,830 +626,660 @@ setGeneric("toJSON", function(x) { standardGeneric("toJSON") })
 setGeneric("toRDD", function(x) { standardGeneric("toRDD") })
 
 #' @rdname union
-#' @export
 setGeneric("union", function(x, y) { standardGeneric("union") })
 
 #' @rdname union
-#' @export
 setGeneric("unionAll", function(x, y) { standardGeneric("unionAll") })
 
 #' @rdname unionByName
-#' @export
 setGeneric("unionByName", function(x, y) { standardGeneric("unionByName") })
 
 #' @rdname unpersist
-#' @export
 setGeneric("unpersist", function(x, ...) { standardGeneric("unpersist") })
 
 #' @rdname filter
-#' @export
 setGeneric("where", function(x, condition) { standardGeneric("where") })
 
 #' @rdname with
-#' @export
 setGeneric("with")
 
 #' @rdname withColumn
-#' @export
 setGeneric("withColumn", function(x, colName, col) { standardGeneric("withColumn") })
 
 #' @rdname rename
-#' @export
 setGeneric("withColumnRenamed",
            function(x, existingCol, newCol) { standardGeneric("withColumnRenamed") })
 
 #' @rdname withWatermark
-#' @export
 setGeneric("withWatermark", function(x, eventTime, delayThreshold) {
   standardGeneric("withWatermark")
 })
 
 #' @rdname write.df
-#' @export
 setGeneric("write.df", function(df, path = NULL, ...) { standardGeneric("write.df") })
 
 #' @rdname randomSplit
-#' @export
 setGeneric("randomSplit", function(x, weights, seed) { standardGeneric("randomSplit") })
 
 #' @rdname broadcast
-#' @export
 setGeneric("broadcast", function(x) { standardGeneric("broadcast") })
 
 ###################### Column Methods ##########################
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("asc", function(x) { standardGeneric("asc") })
 
 #' @rdname between
-#' @export
 setGeneric("between", function(x, bounds) { standardGeneric("between") })
 
 #' @rdname cast
-#' @export
 setGeneric("cast", function(x, dataType) { standardGeneric("cast") })
 
 #' @rdname columnfunctions
 #' @param x a Column object.
 #' @param ... additional argument(s).
-#' @export
 setGeneric("contains", function(x, ...) { standardGeneric("contains") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("desc", function(x) { standardGeneric("desc") })
 
 #' @rdname endsWith
-#' @export
 setGeneric("endsWith", function(x, suffix) { standardGeneric("endsWith") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("getField", function(x, ...) { standardGeneric("getField") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("getItem", function(x, ...) { standardGeneric("getItem") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("isNaN", function(x) { standardGeneric("isNaN") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("isNull", function(x) { standardGeneric("isNull") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("isNotNull", function(x) { standardGeneric("isNotNull") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("like", function(x, ...) { standardGeneric("like") })
 
 #' @rdname columnfunctions
-#' @export
 setGeneric("rlike", function(x, ...) { standardGeneric("rlike") })
 
 #' @rdname startsWith
-#' @export
 setGeneric("startsWith", function(x, prefix) { standardGeneric("startsWith") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("when", function(condition, value) { standardGeneric("when") })
 
 #' @rdname otherwise
-#' @export
 setGeneric("otherwise", function(x, value) { standardGeneric("otherwise") })
 
 #' @rdname over
-#' @export
 setGeneric("over", function(x, window) { standardGeneric("over") })
 
 #' @rdname eq_null_safe
-#' @export
 setGeneric("%<=>%", function(x, value) { standardGeneric("%<=>%") })
 
 ###################### WindowSpec Methods ##########################
 
 #' @rdname partitionBy
-#' @export
 setGeneric("partitionBy", function(x, ...) { standardGeneric("partitionBy") })
 
 #' @rdname rowsBetween
-#' @export
 setGeneric("rowsBetween", function(x, start, end) { standardGeneric("rowsBetween") })
 
 #' @rdname rangeBetween
-#' @export
 setGeneric("rangeBetween", function(x, start, end) { standardGeneric("rangeBetween") })
 
 #' @rdname windowPartitionBy
-#' @export
 setGeneric("windowPartitionBy", function(col, ...) { standardGeneric("windowPartitionBy") })
 
 #' @rdname windowOrderBy
-#' @export
 setGeneric("windowOrderBy", function(col, ...) { standardGeneric("windowOrderBy") })
 
 ###################### Expression Function Methods ##########################
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("add_months", function(y, x) { standardGeneric("add_months") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("approxCountDistinct", function(x, ...) { standardGeneric("approxCountDistinct") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("array_contains", function(x, value) { standardGeneric("array_contains") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("ascii", function(x) { standardGeneric("ascii") })
 
 #' @param x Column to compute on or a GroupedData object.
 #' @param ... additional argument(s) when \code{x} is a GroupedData object.
 #' @rdname avg
-#' @export
 setGeneric("avg", function(x, ...) { standardGeneric("avg") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("base64", function(x) { standardGeneric("base64") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("bin", function(x) { standardGeneric("bin") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("bitwiseNOT", function(x) { standardGeneric("bitwiseNOT") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("bround", function(x, ...) { standardGeneric("bround") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("cbrt", function(x) { standardGeneric("cbrt") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("ceil", function(x) { standardGeneric("ceil") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("collect_list", function(x) { standardGeneric("collect_list") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("collect_set", function(x) { standardGeneric("collect_set") })
 
 #' @rdname column
-#' @export
 setGeneric("column", function(x) { standardGeneric("column") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("concat", function(x, ...) { standardGeneric("concat") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("concat_ws", function(sep, x, ...) { standardGeneric("concat_ws") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("conv", function(x, fromBase, toBase) { standardGeneric("conv") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("countDistinct", function(x, ...) { standardGeneric("countDistinct") })
 
 #' @rdname column_misc_functions
-#' @export
 #' @name NULL
 setGeneric("crc32", function(x) { standardGeneric("crc32") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("create_array", function(x, ...) { standardGeneric("create_array") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("create_map", function(x, ...) { standardGeneric("create_map") })
 
 #' @rdname column_misc_functions
-#' @export
 #' @name NULL
 setGeneric("hash", function(x, ...) { standardGeneric("hash") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("cume_dist", function(x = "missing") { standardGeneric("cume_dist") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("current_date", function(x = "missing") { standardGeneric("current_date") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("current_timestamp", function(x = "missing") { standardGeneric("current_timestamp") })
 
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("datediff", function(y, x) { standardGeneric("datediff") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("date_add", function(y, x) { standardGeneric("date_add") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("date_format", function(y, x) { standardGeneric("date_format") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("date_sub", function(y, x) { standardGeneric("date_sub") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("date_trunc", function(format, x) { standardGeneric("date_trunc") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("dayofmonth", function(x) { standardGeneric("dayofmonth") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("dayofweek", function(x) { standardGeneric("dayofweek") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("dayofyear", function(x) { standardGeneric("dayofyear") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("decode", function(x, charset) { standardGeneric("decode") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("dense_rank", function(x = "missing") { standardGeneric("dense_rank") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("encode", function(x, charset) { standardGeneric("encode") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("explode", function(x) { standardGeneric("explode") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("explode_outer", function(x) { standardGeneric("explode_outer") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("expr", function(x) { standardGeneric("expr") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("from_utc_timestamp", function(y, x) { standardGeneric("from_utc_timestamp") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("format_number", function(y, x) { standardGeneric("format_number") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("format_string", function(format, x, ...) { standardGeneric("format_string") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("from_json", function(x, schema, ...) { standardGeneric("from_json") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("from_unixtime", function(x, ...) { standardGeneric("from_unixtime") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("greatest", function(x, ...) { standardGeneric("greatest") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("grouping_bit", function(x) { standardGeneric("grouping_bit") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("grouping_id", function(x, ...) { standardGeneric("grouping_id") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("hex", function(x) { standardGeneric("hex") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("hour", function(x) { standardGeneric("hour") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("hypot", function(y, x) { standardGeneric("hypot") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("initcap", function(x) { standardGeneric("initcap") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("input_file_name",
            function(x = "missing") { standardGeneric("input_file_name") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("instr", function(y, x) { standardGeneric("instr") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("isnan", function(x) { standardGeneric("isnan") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("kurtosis", function(x) { standardGeneric("kurtosis") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("lag", function(x, ...) { standardGeneric("lag") })
 
 #' @rdname last
-#' @export
 setGeneric("last", function(x, ...) { standardGeneric("last") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("last_day", function(x) { standardGeneric("last_day") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("lead", function(x, offset, defaultValue = NULL) { standardGeneric("lead") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("least", function(x, ...) { standardGeneric("least") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("levenshtein", function(y, x) { standardGeneric("levenshtein") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("lit", function(x) { standardGeneric("lit") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("locate", function(substr, str, ...) { standardGeneric("locate") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("lower", function(x) { standardGeneric("lower") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("lpad", function(x, len, pad) { standardGeneric("lpad") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("ltrim", function(x, trimString) { standardGeneric("ltrim") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("map_keys", function(x) { standardGeneric("map_keys") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("map_values", function(x) { standardGeneric("map_values") })
 
 #' @rdname column_misc_functions
-#' @export
 #' @name NULL
 setGeneric("md5", function(x) { standardGeneric("md5") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("minute", function(x) { standardGeneric("minute") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("monotonically_increasing_id",
            function(x = "missing") { standardGeneric("monotonically_increasing_id") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("month", function(x) { standardGeneric("month") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("months_between", function(y, x) { standardGeneric("months_between") })
 
 #' @rdname count
-#' @export
 setGeneric("n", function(x) { standardGeneric("n") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("nanvl", function(y, x) { standardGeneric("nanvl") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("negate", function(x) { standardGeneric("negate") })
 
 #' @rdname not
-#' @export
 setGeneric("not", function(x) { standardGeneric("not") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("next_day", function(y, x) { standardGeneric("next_day") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("ntile", function(x) { standardGeneric("ntile") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("n_distinct", function(x, ...) { standardGeneric("n_distinct") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("percent_rank", function(x = "missing") { standardGeneric("percent_rank") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("pmod", function(y, x) { standardGeneric("pmod") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("posexplode", function(x) { standardGeneric("posexplode") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("posexplode_outer", function(x) { standardGeneric("posexplode_outer") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("quarter", function(x) { standardGeneric("quarter") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("rand", function(seed) { standardGeneric("rand") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("randn", function(seed) { standardGeneric("randn") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("rank", function(x, ...) { standardGeneric("rank") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("regexp_extract", function(x, pattern, idx) { standardGeneric("regexp_extract") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("regexp_replace",
            function(x, pattern, replacement) { standardGeneric("regexp_replace") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("repeat_string", function(x, n) { standardGeneric("repeat_string") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("reverse", function(x) { standardGeneric("reverse") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("rint", function(x) { standardGeneric("rint") })
 
 #' @rdname column_window_functions
-#' @export
 #' @name NULL
 setGeneric("row_number", function(x = "missing") { standardGeneric("row_number") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("rpad", function(x, len, pad) { standardGeneric("rpad") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("rtrim", function(x, trimString) { standardGeneric("rtrim") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("sd", function(x, na.rm = FALSE) { standardGeneric("sd") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("second", function(x) { standardGeneric("second") })
 
 #' @rdname column_misc_functions
-#' @export
 #' @name NULL
 setGeneric("sha1", function(x) { standardGeneric("sha1") })
 
 #' @rdname column_misc_functions
-#' @export
 #' @name NULL
 setGeneric("sha2", function(y, x) { standardGeneric("sha2") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("shiftLeft", function(y, x) { standardGeneric("shiftLeft") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("shiftRight", function(y, x) { standardGeneric("shiftRight") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("shiftRightUnsigned", function(y, x) { standardGeneric("shiftRightUnsigned") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("signum", function(x) { standardGeneric("signum") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("size", function(x) { standardGeneric("size") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("skewness", function(x) { standardGeneric("skewness") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("sort_array", function(x, asc = TRUE) { standardGeneric("sort_array") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("split_string", function(x, pattern) { standardGeneric("split_string") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("soundex", function(x) { standardGeneric("soundex") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("spark_partition_id", function(x = "missing") { standardGeneric("spark_partition_id") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("stddev", function(x) { standardGeneric("stddev") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("stddev_pop", function(x) { standardGeneric("stddev_pop") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("stddev_samp", function(x) { standardGeneric("stddev_samp") })
 
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @name NULL
 setGeneric("struct", function(x, ...) { standardGeneric("struct") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("substring_index", function(x, delim, count) { standardGeneric("substring_index") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("sumDistinct", function(x) { standardGeneric("sumDistinct") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("toDegrees", function(x) { standardGeneric("toDegrees") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("toRadians", function(x) { standardGeneric("toRadians") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("to_date", function(x, format) { standardGeneric("to_date") })
 
 #' @rdname column_collection_functions
-#' @export
 #' @name NULL
 setGeneric("to_json", function(x, ...) { standardGeneric("to_json") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("to_timestamp", function(x, format) { standardGeneric("to_timestamp") })
 
 #' @rdname column_datetime_diff_functions
-#' @export
 #' @name NULL
 setGeneric("to_utc_timestamp", function(y, x) { standardGeneric("to_utc_timestamp") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("translate", function(x, matchingString, replaceString) { standardGeneric("translate") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("trim", function(x, trimString) { standardGeneric("trim") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("unbase64", function(x) { standardGeneric("unbase64") })
 
 #' @rdname column_math_functions
-#' @export
 #' @name NULL
 setGeneric("unhex", function(x) { standardGeneric("unhex") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("unix_timestamp", function(x, format) { standardGeneric("unix_timestamp") })
 
 #' @rdname column_string_functions
-#' @export
 #' @name NULL
 setGeneric("upper", function(x) { standardGeneric("upper") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("var", function(x, y = NULL, na.rm = FALSE, use) { standardGeneric("var") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("variance", function(x) { standardGeneric("variance") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("var_pop", function(x) { standardGeneric("var_pop") })
 
 #' @rdname column_aggregate_functions
-#' @export
 #' @name NULL
 setGeneric("var_samp", function(x) { standardGeneric("var_samp") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("weekofyear", function(x) { standardGeneric("weekofyear") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("window", function(x, ...) { standardGeneric("window") })
 
 #' @rdname column_datetime_functions
-#' @export
 #' @name NULL
 setGeneric("year", function(x) { standardGeneric("year") })
 
@@ -1598,142 +1287,110 @@ setGeneric("year", function(x) { standardGeneric("year") })
 ###################### Spark.ML Methods ##########################
 
 #' @rdname fitted
-#' @export
 setGeneric("fitted")
 
 # Do not carry stats::glm usage and param here, and do not document the generic
-#' @export
 #' @noRd
 setGeneric("glm")
 
 #' @param object a fitted ML model object.
 #' @param ... additional argument(s) passed to the method.
 #' @rdname predict
-#' @export
 setGeneric("predict", function(object, ...) { standardGeneric("predict") })
 
 #' @rdname rbind
-#' @export
 setGeneric("rbind", signature = "...")
 
 #' @rdname spark.als
-#' @export
 setGeneric("spark.als", function(data, ...) { standardGeneric("spark.als") })
 
 #' @rdname spark.bisectingKmeans
-#' @export
 setGeneric("spark.bisectingKmeans",
            function(data, formula, ...) { standardGeneric("spark.bisectingKmeans") })
 
 #' @rdname spark.gaussianMixture
-#' @export
 setGeneric("spark.gaussianMixture",
            function(data, formula, ...) { standardGeneric("spark.gaussianMixture") })
 
 #' @rdname spark.gbt
-#' @export
 setGeneric("spark.gbt", function(data, formula, ...) { standardGeneric("spark.gbt") })
 
 #' @rdname spark.glm
-#' @export
 setGeneric("spark.glm", function(data, formula, ...) { standardGeneric("spark.glm") })
 
 #' @rdname spark.isoreg
-#' @export
 setGeneric("spark.isoreg", function(data, formula, ...) { standardGeneric("spark.isoreg") })
 
 #' @rdname spark.kmeans
-#' @export
 setGeneric("spark.kmeans", function(data, formula, ...) { standardGeneric("spark.kmeans") })
 
 #' @rdname spark.kstest
-#' @export
 setGeneric("spark.kstest", function(data, ...) { standardGeneric("spark.kstest") })
 
 #' @rdname spark.lda
-#' @export
 setGeneric("spark.lda", function(data, ...) { standardGeneric("spark.lda") })
 
 #' @rdname spark.logit
-#' @export
 setGeneric("spark.logit", function(data, formula, ...) { standardGeneric("spark.logit") })
 
 #' @rdname spark.mlp
-#' @export
 setGeneric("spark.mlp", function(data, formula, ...) { standardGeneric("spark.mlp") })
 
 #' @rdname spark.naiveBayes
-#' @export
 setGeneric("spark.naiveBayes", function(data, formula, ...) { standardGeneric("spark.naiveBayes") })
 
 #' @rdname spark.decisionTree
-#' @export
 setGeneric("spark.decisionTree",
            function(data, formula, ...) { standardGeneric("spark.decisionTree") })
 
 #' @rdname spark.randomForest
-#' @export
 setGeneric("spark.randomForest",
            function(data, formula, ...) { standardGeneric("spark.randomForest") })
 
 #' @rdname spark.survreg
-#' @export
 setGeneric("spark.survreg", function(data, formula, ...) { standardGeneric("spark.survreg") })
 
 #' @rdname spark.svmLinear
-#' @export
 setGeneric("spark.svmLinear", function(data, formula, ...) { standardGeneric("spark.svmLinear") })
 
 #' @rdname spark.lda
-#' @export
 setGeneric("spark.posterior", function(object, newData) { standardGeneric("spark.posterior") })
 
 #' @rdname spark.lda
-#' @export
 setGeneric("spark.perplexity", function(object, data) { standardGeneric("spark.perplexity") })
 
 #' @rdname spark.fpGrowth
-#' @export
 setGeneric("spark.fpGrowth", function(data, ...) { standardGeneric("spark.fpGrowth") })
 
 #' @rdname spark.fpGrowth
-#' @export
 setGeneric("spark.freqItemsets", function(object) { standardGeneric("spark.freqItemsets") })
 
 #' @rdname spark.fpGrowth
-#' @export
 setGeneric("spark.associationRules", function(object) { standardGeneric("spark.associationRules") })
 
 #' @param object a fitted ML model object.
 #' @param path the directory where the model is saved.
 #' @param ... additional argument(s) passed to the method.
 #' @rdname write.ml
-#' @export
 setGeneric("write.ml", function(object, path, ...) { standardGeneric("write.ml") })
 
 
 ###################### Streaming Methods ##########################
 
 #' @rdname awaitTermination
-#' @export
 setGeneric("awaitTermination", function(x, timeout = NULL) { standardGeneric("awaitTermination") })
 
 #' @rdname isActive
-#' @export
 setGeneric("isActive", function(x) { standardGeneric("isActive") })
 
 #' @rdname lastProgress
-#' @export
 setGeneric("lastProgress", function(x) { standardGeneric("lastProgress") })
 
 #' @rdname queryName
-#' @export
 setGeneric("queryName", function(x) { standardGeneric("queryName") })
 
 #' @rdname status
-#' @export
 setGeneric("status", function(x) { standardGeneric("status") })
 
 #' @rdname stopQuery
-#' @export
 setGeneric("stopQuery", function(x) { standardGeneric("stopQuery") })

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/group.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/group.R b/R/pkg/R/group.R
index 54ef9f0..f751b95 100644
--- a/R/pkg/R/group.R
+++ b/R/pkg/R/group.R
@@ -30,7 +30,6 @@ setOldClass("jobj")
 #' @seealso groupBy
 #'
 #' @param sgd A Java object reference to the backing Scala GroupedData
-#' @export
 #' @note GroupedData since 1.4.0
 setClass("GroupedData",
          slots = list(sgd = "jobj"))
@@ -48,7 +47,6 @@ groupedData <- function(sgd) {
 
 #' @rdname show
 #' @aliases show,GroupedData-method
-#' @export
 #' @note show(GroupedData) since 1.4.0
 setMethod("show", "GroupedData",
           function(object) {
@@ -63,7 +61,6 @@ setMethod("show", "GroupedData",
 #' @return A SparkDataFrame.
 #' @rdname count
 #' @aliases count,GroupedData-method
-#' @export
 #' @examples
 #' \dontrun{
 #'   count(groupBy(df, "name"))
@@ -87,7 +84,6 @@ setMethod("count",
 #' @aliases agg,GroupedData-method
 #' @name agg
 #' @family agg_funcs
-#' @export
 #' @examples
 #' \dontrun{
 #'  df2 <- agg(df, age = "sum")  # new column name will be created as 'SUM(age#0)'
@@ -150,7 +146,6 @@ methods <- c("avg", "max", "mean", "min", "sum")
 #' @rdname pivot
 #' @aliases pivot,GroupedData,character-method
 #' @name pivot
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(data.frame(
@@ -202,7 +197,6 @@ createMethods()
 #' @rdname gapply
 #' @aliases gapply,GroupedData-method
 #' @name gapply
-#' @export
 #' @note gapply(GroupedData) since 2.0.0
 setMethod("gapply",
           signature(x = "GroupedData"),
@@ -216,7 +210,6 @@ setMethod("gapply",
 #' @rdname gapplyCollect
 #' @aliases gapplyCollect,GroupedData-method
 #' @name gapplyCollect
-#' @export
 #' @note gapplyCollect(GroupedData) since 2.0.0
 setMethod("gapplyCollect",
           signature(x = "GroupedData"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/install.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/install.R b/R/pkg/R/install.R
index 04dc756..6d1edf6 100644
--- a/R/pkg/R/install.R
+++ b/R/pkg/R/install.R
@@ -58,7 +58,6 @@
 #' @rdname install.spark
 #' @name install.spark
 #' @aliases install.spark
-#' @export
 #' @examples
 #'\dontrun{
 #' install.spark()

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/jvm.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/jvm.R b/R/pkg/R/jvm.R
index bb5c775..9a1b26b 100644
--- a/R/pkg/R/jvm.R
+++ b/R/pkg/R/jvm.R
@@ -35,7 +35,6 @@
 #' @param ... parameters to pass to the Java method.
 #' @return the return value of the Java method. Either returned as a R object
 #'  if it can be deserialized or returned as a "jobj". See details section for more.
-#' @export
 #' @seealso \link{sparkR.callJStatic}, \link{sparkR.newJObject}
 #' @rdname sparkR.callJMethod
 #' @examples
@@ -69,7 +68,6 @@ sparkR.callJMethod <- function(x, methodName, ...) {
 #' @param ... parameters to pass to the Java method.
 #' @return the return value of the Java method. Either returned as a R object
 #'  if it can be deserialized or returned as a "jobj". See details section for more.
-#' @export
 #' @seealso \link{sparkR.callJMethod}, \link{sparkR.newJObject}
 #' @rdname sparkR.callJStatic
 #' @examples
@@ -100,7 +98,6 @@ sparkR.callJStatic <- function(x, methodName, ...) {
 #' @param ... arguments to be passed to the constructor.
 #' @return the object created. Either returned as a R object
 #'   if it can be deserialized or returned as a "jobj". See details section for more.
-#' @export
 #' @seealso \link{sparkR.callJMethod}, \link{sparkR.callJStatic}
 #' @rdname sparkR.newJObject
 #' @examples

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_classification.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_classification.R b/R/pkg/R/mllib_classification.R
index f6e9b13..2964fde 100644
--- a/R/pkg/R/mllib_classification.R
+++ b/R/pkg/R/mllib_classification.R
@@ -21,28 +21,24 @@
 #' S4 class that represents a LinearSVCModel
 #'
 #' @param jobj a Java object reference to the backing Scala LinearSVCModel
-#' @export
 #' @note LinearSVCModel since 2.2.0
 setClass("LinearSVCModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a LogisticRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala LogisticRegressionModel
-#' @export
 #' @note LogisticRegressionModel since 2.1.0
 setClass("LogisticRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a MultilayerPerceptronClassificationModel
 #'
 #' @param jobj a Java object reference to the backing Scala MultilayerPerceptronClassifierWrapper
-#' @export
 #' @note MultilayerPerceptronClassificationModel since 2.1.0
 setClass("MultilayerPerceptronClassificationModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a NaiveBayesModel
 #'
 #' @param jobj a Java object reference to the backing Scala NaiveBayesWrapper
-#' @export
 #' @note NaiveBayesModel since 2.0.0
 setClass("NaiveBayesModel", representation(jobj = "jobj"))
 
@@ -82,7 +78,6 @@ setClass("NaiveBayesModel", representation(jobj = "jobj"))
 #' @rdname spark.svmLinear
 #' @aliases spark.svmLinear,SparkDataFrame,formula-method
 #' @name spark.svmLinear
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -131,7 +126,6 @@ setMethod("spark.svmLinear", signature(data = "SparkDataFrame", formula = "formu
 #' @return \code{predict} returns the predicted values based on a LinearSVCModel.
 #' @rdname spark.svmLinear
 #' @aliases predict,LinearSVCModel,SparkDataFrame-method
-#' @export
 #' @note predict(LinearSVCModel) since 2.2.0
 setMethod("predict", signature(object = "LinearSVCModel"),
           function(object, newData) {
@@ -146,7 +140,6 @@ setMethod("predict", signature(object = "LinearSVCModel"),
 #'         \code{numClasses} (number of classes), \code{numFeatures} (number of features).
 #' @rdname spark.svmLinear
 #' @aliases summary,LinearSVCModel-method
-#' @export
 #' @note summary(LinearSVCModel) since 2.2.0
 setMethod("summary", signature(object = "LinearSVCModel"),
           function(object) {
@@ -169,7 +162,6 @@ setMethod("summary", signature(object = "LinearSVCModel"),
 #'
 #' @rdname spark.svmLinear
 #' @aliases write.ml,LinearSVCModel,character-method
-#' @export
 #' @note write.ml(LogisticRegression, character) since 2.2.0
 setMethod("write.ml", signature(object = "LinearSVCModel", path = "character"),
 function(object, path, overwrite = FALSE) {
@@ -257,7 +249,6 @@ function(object, path, overwrite = FALSE) {
 #' @rdname spark.logit
 #' @aliases spark.logit,SparkDataFrame,formula-method
 #' @name spark.logit
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -374,7 +365,6 @@ setMethod("spark.logit", signature(data = "SparkDataFrame", formula = "formula")
 #'         The list includes \code{coefficients} (coefficients matrix of the fitted model).
 #' @rdname spark.logit
 #' @aliases summary,LogisticRegressionModel-method
-#' @export
 #' @note summary(LogisticRegressionModel) since 2.1.0
 setMethod("summary", signature(object = "LogisticRegressionModel"),
           function(object) {
@@ -402,7 +392,6 @@ setMethod("summary", signature(object = "LogisticRegressionModel"),
 #' @return \code{predict} returns the predicted values based on a LogisticRegressionModel.
 #' @rdname spark.logit
 #' @aliases predict,LogisticRegressionModel,SparkDataFrame-method
-#' @export
 #' @note predict(LogisticRegressionModel) since 2.1.0
 setMethod("predict", signature(object = "LogisticRegressionModel"),
           function(object, newData) {
@@ -417,7 +406,6 @@ setMethod("predict", signature(object = "LogisticRegressionModel"),
 #'
 #' @rdname spark.logit
 #' @aliases write.ml,LogisticRegressionModel,character-method
-#' @export
 #' @note write.ml(LogisticRegression, character) since 2.1.0
 setMethod("write.ml", signature(object = "LogisticRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -458,7 +446,6 @@ setMethod("write.ml", signature(object = "LogisticRegressionModel", path = "char
 #' @aliases spark.mlp,SparkDataFrame,formula-method
 #' @name spark.mlp
 #' @seealso \link{read.ml}
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- read.df("data/mllib/sample_multiclass_classification_data.txt", source = "libsvm")
@@ -517,7 +504,6 @@ setMethod("spark.mlp", signature(data = "SparkDataFrame", formula = "formula"),
 #'         For \code{weights}, it is a numeric vector with length equal to the expected number of
 #'         weights given the architecture (i.e., for an 8-10-2 network, 112 connection weights).
 #' @rdname spark.mlp
-#' @export
 #' @aliases summary,MultilayerPerceptronClassificationModel-method
 #' @note summary(MultilayerPerceptronClassificationModel) since 2.1.0
 setMethod("summary", signature(object = "MultilayerPerceptronClassificationModel"),
@@ -538,7 +524,6 @@ setMethod("summary", signature(object = "MultilayerPerceptronClassificationModel
 #' "prediction".
 #' @rdname spark.mlp
 #' @aliases predict,MultilayerPerceptronClassificationModel-method
-#' @export
 #' @note predict(MultilayerPerceptronClassificationModel) since 2.1.0
 setMethod("predict", signature(object = "MultilayerPerceptronClassificationModel"),
           function(object, newData) {
@@ -553,7 +538,6 @@ setMethod("predict", signature(object = "MultilayerPerceptronClassificationModel
 #'
 #' @rdname spark.mlp
 #' @aliases write.ml,MultilayerPerceptronClassificationModel,character-method
-#' @export
 #' @seealso \link{write.ml}
 #' @note write.ml(MultilayerPerceptronClassificationModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "MultilayerPerceptronClassificationModel",
@@ -585,7 +569,6 @@ setMethod("write.ml", signature(object = "MultilayerPerceptronClassificationMode
 #' @aliases spark.naiveBayes,SparkDataFrame,formula-method
 #' @name spark.naiveBayes
 #' @seealso e1071: \url{https://cran.r-project.org/package=e1071}
-#' @export
 #' @examples
 #' \dontrun{
 #' data <- as.data.frame(UCBAdmissions)
@@ -624,7 +607,6 @@ setMethod("spark.naiveBayes", signature(data = "SparkDataFrame", formula = "form
 #'         The list includes \code{apriori} (the label distribution) and
 #'         \code{tables} (conditional probabilities given the target label).
 #' @rdname spark.naiveBayes
-#' @export
 #' @note summary(NaiveBayesModel) since 2.0.0
 setMethod("summary", signature(object = "NaiveBayesModel"),
           function(object) {
@@ -648,7 +630,6 @@ setMethod("summary", signature(object = "NaiveBayesModel"),
 #' @return \code{predict} returns a SparkDataFrame containing predicted labels in a column named
 #' "prediction".
 #' @rdname spark.naiveBayes
-#' @export
 #' @note predict(NaiveBayesModel) since 2.0.0
 setMethod("predict", signature(object = "NaiveBayesModel"),
           function(object, newData) {
@@ -662,7 +643,6 @@ setMethod("predict", signature(object = "NaiveBayesModel"),
 #'                  which means throw exception if the output path exists.
 #'
 #' @rdname spark.naiveBayes
-#' @export
 #' @seealso \link{write.ml}
 #' @note write.ml(NaiveBayesModel, character) since 2.0.0
 setMethod("write.ml", signature(object = "NaiveBayesModel", path = "character"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_clustering.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_clustering.R b/R/pkg/R/mllib_clustering.R
index a25bf81..900be68 100644
--- a/R/pkg/R/mllib_clustering.R
+++ b/R/pkg/R/mllib_clustering.R
@@ -20,28 +20,24 @@
 #' S4 class that represents a BisectingKMeansModel
 #'
 #' @param jobj a Java object reference to the backing Scala BisectingKMeansModel
-#' @export
 #' @note BisectingKMeansModel since 2.2.0
 setClass("BisectingKMeansModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a GaussianMixtureModel
 #'
 #' @param jobj a Java object reference to the backing Scala GaussianMixtureModel
-#' @export
 #' @note GaussianMixtureModel since 2.1.0
 setClass("GaussianMixtureModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a KMeansModel
 #'
 #' @param jobj a Java object reference to the backing Scala KMeansModel
-#' @export
 #' @note KMeansModel since 2.0.0
 setClass("KMeansModel", representation(jobj = "jobj"))
 
 #' S4 class that represents an LDAModel
 #'
 #' @param jobj a Java object reference to the backing Scala LDAWrapper
-#' @export
 #' @note LDAModel since 2.1.0
 setClass("LDAModel", representation(jobj = "jobj"))
 
@@ -68,7 +64,6 @@ setClass("LDAModel", representation(jobj = "jobj"))
 #' @rdname spark.bisectingKmeans
 #' @aliases spark.bisectingKmeans,SparkDataFrame,formula-method
 #' @name spark.bisectingKmeans
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -117,7 +112,6 @@ setMethod("spark.bisectingKmeans", signature(data = "SparkDataFrame", formula =
 #'         (cluster centers of the transformed data; cluster is NULL if is.loaded is TRUE),
 #'         and \code{is.loaded} (whether the model is loaded from a saved file).
 #' @rdname spark.bisectingKmeans
-#' @export
 #' @note summary(BisectingKMeansModel) since 2.2.0
 setMethod("summary", signature(object = "BisectingKMeansModel"),
           function(object) {
@@ -144,7 +138,6 @@ setMethod("summary", signature(object = "BisectingKMeansModel"),
 #' @param newData a SparkDataFrame for testing.
 #' @return \code{predict} returns the predicted values based on a bisecting k-means model.
 #' @rdname spark.bisectingKmeans
-#' @export
 #' @note predict(BisectingKMeansModel) since 2.2.0
 setMethod("predict", signature(object = "BisectingKMeansModel"),
           function(object, newData) {
@@ -160,7 +153,6 @@ setMethod("predict", signature(object = "BisectingKMeansModel"),
 #'        or \code{"classes"} for assigned classes.
 #' @return \code{fitted} returns a SparkDataFrame containing fitted values.
 #' @rdname spark.bisectingKmeans
-#' @export
 #' @note fitted since 2.2.0
 setMethod("fitted", signature(object = "BisectingKMeansModel"),
           function(object, method = c("centers", "classes")) {
@@ -181,7 +173,6 @@ setMethod("fitted", signature(object = "BisectingKMeansModel"),
 #'                  which means throw exception if the output path exists.
 #'
 #' @rdname spark.bisectingKmeans
-#' @export
 #' @note write.ml(BisectingKMeansModel, character) since 2.2.0
 setMethod("write.ml", signature(object = "BisectingKMeansModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -208,7 +199,6 @@ setMethod("write.ml", signature(object = "BisectingKMeansModel", path = "charact
 #' @rdname spark.gaussianMixture
 #' @name spark.gaussianMixture
 #' @seealso mixtools: \url{https://cran.r-project.org/package=mixtools}
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -251,7 +241,6 @@ setMethod("spark.gaussianMixture", signature(data = "SparkDataFrame", formula =
 #'         \code{sigma} (sigma), \code{loglik} (loglik), and \code{posterior} (posterior).
 #' @aliases spark.gaussianMixture,SparkDataFrame,formula-method
 #' @rdname spark.gaussianMixture
-#' @export
 #' @note summary(GaussianMixtureModel) since 2.1.0
 setMethod("summary", signature(object = "GaussianMixtureModel"),
           function(object) {
@@ -291,7 +280,6 @@ setMethod("summary", signature(object = "GaussianMixtureModel"),
 #'         "prediction".
 #' @aliases predict,GaussianMixtureModel,SparkDataFrame-method
 #' @rdname spark.gaussianMixture
-#' @export
 #' @note predict(GaussianMixtureModel) since 2.1.0
 setMethod("predict", signature(object = "GaussianMixtureModel"),
           function(object, newData) {
@@ -306,7 +294,6 @@ setMethod("predict", signature(object = "GaussianMixtureModel"),
 #'
 #' @aliases write.ml,GaussianMixtureModel,character-method
 #' @rdname spark.gaussianMixture
-#' @export
 #' @note write.ml(GaussianMixtureModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "GaussianMixtureModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -336,7 +323,6 @@ setMethod("write.ml", signature(object = "GaussianMixtureModel", path = "charact
 #' @rdname spark.kmeans
 #' @aliases spark.kmeans,SparkDataFrame,formula-method
 #' @name spark.kmeans
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -385,7 +371,6 @@ setMethod("spark.kmeans", signature(data = "SparkDataFrame", formula = "formula"
 #'         (the actual number of cluster centers. When using initMode = "random",
 #'         \code{clusterSize} may not equal \code{k}).
 #' @rdname spark.kmeans
-#' @export
 #' @note summary(KMeansModel) since 2.0.0
 setMethod("summary", signature(object = "KMeansModel"),
           function(object) {
@@ -413,7 +398,6 @@ setMethod("summary", signature(object = "KMeansModel"),
 #' @param newData a SparkDataFrame for testing.
 #' @return \code{predict} returns the predicted values based on a k-means model.
 #' @rdname spark.kmeans
-#' @export
 #' @note predict(KMeansModel) since 2.0.0
 setMethod("predict", signature(object = "KMeansModel"),
           function(object, newData) {
@@ -431,7 +415,6 @@ setMethod("predict", signature(object = "KMeansModel"),
 #' @param ... additional argument(s) passed to the method.
 #' @return \code{fitted} returns a SparkDataFrame containing fitted values.
 #' @rdname fitted
-#' @export
 #' @examples
 #' \dontrun{
 #' model <- spark.kmeans(trainingData, ~ ., 2)
@@ -458,7 +441,6 @@ setMethod("fitted", signature(object = "KMeansModel"),
 #'                  which means throw exception if the output path exists.
 #'
 #' @rdname spark.kmeans
-#' @export
 #' @note write.ml(KMeansModel, character) since 2.0.0
 setMethod("write.ml", signature(object = "KMeansModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -496,7 +478,6 @@ setMethod("write.ml", signature(object = "KMeansModel", path = "character"),
 #' @rdname spark.lda
 #' @aliases spark.lda,SparkDataFrame-method
 #' @seealso topicmodels: \url{https://cran.r-project.org/package=topicmodels}
-#' @export
 #' @examples
 #' \dontrun{
 #' text <- read.df("data/mllib/sample_lda_libsvm_data.txt", source = "libsvm")
@@ -558,7 +539,6 @@ setMethod("spark.lda", signature(data = "SparkDataFrame"),
 #'               It is only for distributed LDA model (i.e., optimizer = "em")}
 #' @rdname spark.lda
 #' @aliases summary,LDAModel-method
-#' @export
 #' @note summary(LDAModel) since 2.1.0
 setMethod("summary", signature(object = "LDAModel"),
           function(object, maxTermsPerTopic) {
@@ -596,7 +576,6 @@ setMethod("summary", signature(object = "LDAModel"),
 #'         perplexity of the training data if missing argument "data".
 #' @rdname spark.lda
 #' @aliases spark.perplexity,LDAModel-method
-#' @export
 #' @note spark.perplexity(LDAModel) since 2.1.0
 setMethod("spark.perplexity", signature(object = "LDAModel", data = "SparkDataFrame"),
           function(object, data) {
@@ -611,7 +590,6 @@ setMethod("spark.perplexity", signature(object = "LDAModel", data = "SparkDataFr
 #'         vectors named "topicDistribution".
 #' @rdname spark.lda
 #' @aliases spark.posterior,LDAModel,SparkDataFrame-method
-#' @export
 #' @note spark.posterior(LDAModel) since 2.1.0
 setMethod("spark.posterior", signature(object = "LDAModel", newData = "SparkDataFrame"),
           function(object, newData) {
@@ -626,7 +604,6 @@ setMethod("spark.posterior", signature(object = "LDAModel", newData = "SparkData
 #'
 #' @rdname spark.lda
 #' @aliases write.ml,LDAModel,character-method
-#' @export
 #' @seealso \link{read.ml}
 #' @note write.ml(LDAModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "LDAModel", path = "character"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_fpm.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_fpm.R b/R/pkg/R/mllib_fpm.R
index dfcb45a..e239490 100644
--- a/R/pkg/R/mllib_fpm.R
+++ b/R/pkg/R/mllib_fpm.R
@@ -20,7 +20,6 @@
 #' S4 class that represents a FPGrowthModel
 #'
 #' @param jobj a Java object reference to the backing Scala FPGrowthModel
-#' @export
 #' @note FPGrowthModel since 2.2.0
 setClass("FPGrowthModel", slots = list(jobj = "jobj"))
 
@@ -45,7 +44,6 @@ setClass("FPGrowthModel", slots = list(jobj = "jobj"))
 #' @rdname spark.fpGrowth
 #' @name spark.fpGrowth
 #' @aliases spark.fpGrowth,SparkDataFrame-method
-#' @export
 #' @examples
 #' \dontrun{
 #' raw_data <- read.df(
@@ -109,7 +107,6 @@ setMethod("spark.fpGrowth", signature(data = "SparkDataFrame"),
 #'         and \code{freq} (frequency of the itemset).
 #' @rdname spark.fpGrowth
 #' @aliases freqItemsets,FPGrowthModel-method
-#' @export
 #' @note spark.freqItemsets(FPGrowthModel) since 2.2.0
 setMethod("spark.freqItemsets", signature(object = "FPGrowthModel"),
           function(object) {
@@ -125,7 +122,6 @@ setMethod("spark.freqItemsets", signature(object = "FPGrowthModel"),
 #'         and \code{confidence} (confidence).
 #' @rdname spark.fpGrowth
 #' @aliases associationRules,FPGrowthModel-method
-#' @export
 #' @note spark.associationRules(FPGrowthModel) since 2.2.0
 setMethod("spark.associationRules", signature(object = "FPGrowthModel"),
           function(object) {
@@ -138,7 +134,6 @@ setMethod("spark.associationRules", signature(object = "FPGrowthModel"),
 #' @return \code{predict} returns a SparkDataFrame containing predicted values.
 #' @rdname spark.fpGrowth
 #' @aliases predict,FPGrowthModel-method
-#' @export
 #' @note predict(FPGrowthModel) since 2.2.0
 setMethod("predict", signature(object = "FPGrowthModel"),
           function(object, newData) {
@@ -153,7 +148,6 @@ setMethod("predict", signature(object = "FPGrowthModel"),
 #'                  if the output path exists.
 #' @rdname spark.fpGrowth
 #' @aliases write.ml,FPGrowthModel,character-method
-#' @export
 #' @seealso \link{read.ml}
 #' @note write.ml(FPGrowthModel, character) since 2.2.0
 setMethod("write.ml", signature(object = "FPGrowthModel", path = "character"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_recommendation.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_recommendation.R b/R/pkg/R/mllib_recommendation.R
index 5441c4a..9a77b07 100644
--- a/R/pkg/R/mllib_recommendation.R
+++ b/R/pkg/R/mllib_recommendation.R
@@ -20,7 +20,6 @@
 #' S4 class that represents an ALSModel
 #'
 #' @param jobj a Java object reference to the backing Scala ALSWrapper
-#' @export
 #' @note ALSModel since 2.1.0
 setClass("ALSModel", representation(jobj = "jobj"))
 
@@ -55,7 +54,6 @@ setClass("ALSModel", representation(jobj = "jobj"))
 #' @rdname spark.als
 #' @aliases spark.als,SparkDataFrame-method
 #' @name spark.als
-#' @export
 #' @examples
 #' \dontrun{
 #' ratings <- list(list(0, 0, 4.0), list(0, 1, 2.0), list(1, 1, 3.0), list(1, 2, 4.0),
@@ -118,7 +116,6 @@ setMethod("spark.als", signature(data = "SparkDataFrame"),
 #'         and \code{rank} (rank of the matrix factorization model).
 #' @rdname spark.als
 #' @aliases summary,ALSModel-method
-#' @export
 #' @note summary(ALSModel) since 2.1.0
 setMethod("summary", signature(object = "ALSModel"),
           function(object) {
@@ -139,7 +136,6 @@ setMethod("summary", signature(object = "ALSModel"),
 #' @return \code{predict} returns a SparkDataFrame containing predicted values.
 #' @rdname spark.als
 #' @aliases predict,ALSModel-method
-#' @export
 #' @note predict(ALSModel) since 2.1.0
 setMethod("predict", signature(object = "ALSModel"),
           function(object, newData) {
@@ -155,7 +151,6 @@ setMethod("predict", signature(object = "ALSModel"),
 #'
 #' @rdname spark.als
 #' @aliases write.ml,ALSModel,character-method
-#' @export
 #' @seealso \link{read.ml}
 #' @note write.ml(ALSModel, character) since 2.1.0
 setMethod("write.ml", signature(object = "ALSModel", path = "character"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_regression.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_regression.R b/R/pkg/R/mllib_regression.R
index 545be5e..95c1a29 100644
--- a/R/pkg/R/mllib_regression.R
+++ b/R/pkg/R/mllib_regression.R
@@ -21,21 +21,18 @@
 #' S4 class that represents an AFTSurvivalRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala AFTSurvivalRegressionWrapper
-#' @export
 #' @note AFTSurvivalRegressionModel since 2.0.0
 setClass("AFTSurvivalRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents a generalized linear model
 #'
 #' @param jobj a Java object reference to the backing Scala GeneralizedLinearRegressionWrapper
-#' @export
 #' @note GeneralizedLinearRegressionModel since 2.0.0
 setClass("GeneralizedLinearRegressionModel", representation(jobj = "jobj"))
 
 #' S4 class that represents an IsotonicRegressionModel
 #'
 #' @param jobj a Java object reference to the backing Scala IsotonicRegressionModel
-#' @export
 #' @note IsotonicRegressionModel since 2.1.0
 setClass("IsotonicRegressionModel", representation(jobj = "jobj"))
 
@@ -85,7 +82,6 @@ setClass("IsotonicRegressionModel", representation(jobj = "jobj"))
 #' @return \code{spark.glm} returns a fitted generalized linear model.
 #' @rdname spark.glm
 #' @name spark.glm
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -211,7 +207,6 @@ setMethod("spark.glm", signature(data = "SparkDataFrame", formula = "formula"),
 #' @return \code{glm} returns a fitted generalized linear model.
 #' @rdname glm
 #' @aliases glm
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -244,7 +239,6 @@ setMethod("glm", signature(formula = "formula", family = "ANY", data = "SparkDat
 #'         and \code{iter} (number of iterations IRLS takes). If there are collinear columns in
 #'         the data, the coefficients matrix only provides coefficients.
 #' @rdname spark.glm
-#' @export
 #' @note summary(GeneralizedLinearRegressionModel) since 2.0.0
 setMethod("summary", signature(object = "GeneralizedLinearRegressionModel"),
           function(object) {
@@ -290,7 +284,6 @@ setMethod("summary", signature(object = "GeneralizedLinearRegressionModel"),
 
 #' @rdname spark.glm
 #' @param x summary object of fitted generalized linear model returned by \code{summary} function.
-#' @export
 #' @note print.summary.GeneralizedLinearRegressionModel since 2.0.0
 print.summary.GeneralizedLinearRegressionModel <- function(x, ...) {
   if (x$is.loaded) {
@@ -324,7 +317,6 @@ print.summary.GeneralizedLinearRegressionModel <- function(x, ...) {
 #' @return \code{predict} returns a SparkDataFrame containing predicted labels in a column named
 #'         "prediction".
 #' @rdname spark.glm
-#' @export
 #' @note predict(GeneralizedLinearRegressionModel) since 1.5.0
 setMethod("predict", signature(object = "GeneralizedLinearRegressionModel"),
           function(object, newData) {
@@ -338,7 +330,6 @@ setMethod("predict", signature(object = "GeneralizedLinearRegressionModel"),
 #'                  which means throw exception if the output path exists.
 #'
 #' @rdname spark.glm
-#' @export
 #' @note write.ml(GeneralizedLinearRegressionModel, character) since 2.0.0
 setMethod("write.ml", signature(object = "GeneralizedLinearRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -363,7 +354,6 @@ setMethod("write.ml", signature(object = "GeneralizedLinearRegressionModel", pat
 #' @rdname spark.isoreg
 #' @aliases spark.isoreg,SparkDataFrame,formula-method
 #' @name spark.isoreg
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -412,7 +402,6 @@ setMethod("spark.isoreg", signature(data = "SparkDataFrame", formula = "formula"
 #'         and \code{predictions} (predictions associated with the boundaries at the same index).
 #' @rdname spark.isoreg
 #' @aliases summary,IsotonicRegressionModel-method
-#' @export
 #' @note summary(IsotonicRegressionModel) since 2.1.0
 setMethod("summary", signature(object = "IsotonicRegressionModel"),
           function(object) {
@@ -429,7 +418,6 @@ setMethod("summary", signature(object = "IsotonicRegressionModel"),
 #' @return \code{predict} returns a SparkDataFrame containing predicted values.
 #' @rdname spark.isoreg
 #' @aliases predict,IsotonicRegressionModel,SparkDataFrame-method
-#' @export
 #' @note predict(IsotonicRegressionModel) since 2.1.0
 setMethod("predict", signature(object = "IsotonicRegressionModel"),
           function(object, newData) {
@@ -444,7 +432,6 @@ setMethod("predict", signature(object = "IsotonicRegressionModel"),
 #'
 #' @rdname spark.isoreg
 #' @aliases write.ml,IsotonicRegressionModel,character-method
-#' @export
 #' @note write.ml(IsotonicRegression, character) since 2.1.0
 setMethod("write.ml", signature(object = "IsotonicRegressionModel", path = "character"),
           function(object, path, overwrite = FALSE) {
@@ -477,7 +464,6 @@ setMethod("write.ml", signature(object = "IsotonicRegressionModel", path = "char
 #' @return \code{spark.survreg} returns a fitted AFT survival regression model.
 #' @rdname spark.survreg
 #' @seealso survival: \url{https://cran.r-project.org/package=survival}
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(ovarian)
@@ -517,7 +503,6 @@ setMethod("spark.survreg", signature(data = "SparkDataFrame", formula = "formula
 #'         The list includes the model's \code{coefficients} (features, coefficients,
 #'         intercept and log(scale)).
 #' @rdname spark.survreg
-#' @export
 #' @note summary(AFTSurvivalRegressionModel) since 2.0.0
 setMethod("summary", signature(object = "AFTSurvivalRegressionModel"),
           function(object) {
@@ -537,7 +522,6 @@ setMethod("summary", signature(object = "AFTSurvivalRegressionModel"),
 #' @return \code{predict} returns a SparkDataFrame containing predicted values
 #'         on the original scale of the data (mean predicted value at scale = 1.0).
 #' @rdname spark.survreg
-#' @export
 #' @note predict(AFTSurvivalRegressionModel) since 2.0.0
 setMethod("predict", signature(object = "AFTSurvivalRegressionModel"),
           function(object, newData) {
@@ -550,7 +534,6 @@ setMethod("predict", signature(object = "AFTSurvivalRegressionModel"),
 #' @param overwrite overwrites or not if the output path already exists. Default is FALSE
 #'                  which means throw exception if the output path exists.
 #' @rdname spark.survreg
-#' @export
 #' @note write.ml(AFTSurvivalRegressionModel, character) since 2.0.0
 #' @seealso \link{write.ml}
 setMethod("write.ml", signature(object = "AFTSurvivalRegressionModel", path = "character"),

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/mllib_stat.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/mllib_stat.R b/R/pkg/R/mllib_stat.R
index 3e013f1..f8c3329 100644
--- a/R/pkg/R/mllib_stat.R
+++ b/R/pkg/R/mllib_stat.R
@@ -20,7 +20,6 @@
 #' S4 class that represents a KSTest
 #'
 #' @param jobj a Java object reference to the backing Scala KSTestWrapper
-#' @export
 #' @note KSTest since 2.1.0
 setClass("KSTest", representation(jobj = "jobj"))
 
@@ -52,7 +51,6 @@ setClass("KSTest", representation(jobj = "jobj"))
 #' @name spark.kstest
 #' @seealso \href{http://spark.apache.org/docs/latest/mllib-statistics.html#hypothesis-testing}{
 #'          MLlib: Hypothesis Testing}
-#' @export
 #' @examples
 #' \dontrun{
 #' data <- data.frame(test = c(0.1, 0.15, 0.2, 0.3, 0.25))
@@ -94,7 +92,6 @@ setMethod("spark.kstest", signature(data = "SparkDataFrame"),
 #'         parameters tested against) and \code{degreesOfFreedom} (degrees of freedom of the test).
 #' @rdname spark.kstest
 #' @aliases summary,KSTest-method
-#' @export
 #' @note summary(KSTest) since 2.1.0
 setMethod("summary", signature(object = "KSTest"),
           function(object) {
@@ -117,7 +114,6 @@ setMethod("summary", signature(object = "KSTest"),
 
 #' @rdname spark.kstest
 #' @param x summary object of KSTest returned by \code{summary}.
-#' @export
 #' @note print.summary.KSTest since 2.1.0
 print.summary.KSTest <- function(x, ...) {
   jobj <- x$jobj




[3/3] spark git commit: [SPARK-22430][R][DOCS] Unknown tag warnings when building R docs with Roxygen 6.0.1

Posted by fe...@apache.org.
[SPARK-22430][R][DOCS] Unknown tag warnings when building R docs with Roxygen 6.0.1

## What changes were proposed in this pull request?
Removed the roxygen @export tags to get rid of the unknown tag warnings reported when building the R docs with Roxygen 6.0.1
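
For illustration only (a sketch of the recurring pattern, not a hunk copied from the patch), the change for one of the ML generics looks like this; the working assumption, stated here rather than in the patch, is that SparkR lists its exports explicitly in R/pkg/NAMESPACE, so dropping the roxygen tag affects only the generated docs, not what the package exports:

    # Before: the doc build with Roxygen 6.0.1 reports the @export tag as unknown.
    #' @rdname spark.kmeans
    #' @export
    setGeneric("spark.kmeans", function(data, formula, ...) { standardGeneric("spark.kmeans") })

    # After: only the tag is dropped; the generic and its @rdname mapping are unchanged.
    #' @rdname spark.kmeans
    setGeneric("spark.kmeans", function(data, formula, ...) { standardGeneric("spark.kmeans") })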

## How was this patch tested?
Existing tests
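
Beyond the existing tests, one quick self-check (an illustrative R snippet, not part of the patch) is to confirm that no roxygen @export tags survive under R/pkg/R once the change is applied:

    # Count remaining "#' @export" lines in the R sources; 0 is expected after this patch.
    files <- list.files("R/pkg/R", pattern = "\\.R$", full.names = TRUE)
    leftover <- sum(vapply(files,
                           function(f) sum(grepl("#' @export", readLines(f), fixed = TRUE)),
                           integer(1)))
    leftover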

Author: Rekha Joshi <re...@gmail.com>
Author: rjoshi2 <re...@gmail.com>

Closes #20501 from rekhajoshm/SPARK-22430.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4586eada
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4586eada
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4586eada

Branch: refs/heads/master
Commit: 4586eada42d6a16bb78d1650d145531c51fa747f
Parents: 947b4e6
Author: Rekha Joshi <re...@gmail.com>
Authored: Mon Mar 5 09:30:49 2018 -0800
Committer: Felix Cheung <fe...@apache.org>
Committed: Mon Mar 5 09:30:49 2018 -0800

----------------------------------------------------------------------
 R/pkg/R/DataFrame.R            |  92 ----------
 R/pkg/R/SQLContext.R           |  16 --
 R/pkg/R/WindowSpec.R           |   8 -
 R/pkg/R/broadcast.R            |   3 -
 R/pkg/R/catalog.R              |  18 --
 R/pkg/R/column.R               |   7 -
 R/pkg/R/context.R              |   6 -
 R/pkg/R/functions.R            | 181 -------------------
 R/pkg/R/generics.R             | 343 ------------------------------------
 R/pkg/R/group.R                |   7 -
 R/pkg/R/install.R              |   1 -
 R/pkg/R/jvm.R                  |   3 -
 R/pkg/R/mllib_classification.R |  20 ---
 R/pkg/R/mllib_clustering.R     |  23 ---
 R/pkg/R/mllib_fpm.R            |   6 -
 R/pkg/R/mllib_recommendation.R |   5 -
 R/pkg/R/mllib_regression.R     |  17 --
 R/pkg/R/mllib_stat.R           |   4 -
 R/pkg/R/mllib_tree.R           |  33 ----
 R/pkg/R/mllib_utils.R          |   3 -
 R/pkg/R/schema.R               |   7 -
 R/pkg/R/sparkR.R               |   7 -
 R/pkg/R/stats.R                |   6 -
 R/pkg/R/streaming.R            |   9 -
 R/pkg/R/utils.R                |   1 -
 R/pkg/R/window.R               |   4 -
 26 files changed, 830 deletions(-)
----------------------------------------------------------------------
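
The 830 deleted lines above are all documentation tags, and R/pkg/NAMESPACE is not in the file list, so the exported API should be unchanged. A hedged way to double-check locally, assuming the rebuilt SparkR package is on the library path:

    # NAMESPACE is untouched by this patch, so the export list should be identical before and after.
    exports <- sort(getNamespaceExports("SparkR"))
    length(exports)
    "spark.kmeans" %in% exports  # generics edited above should still be exported: TRUE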


http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/DataFrame.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 41c3c3a..c485202 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -36,7 +36,6 @@ setOldClass("structType")
 #' @slot sdf A Java object reference to the backing Scala DataFrame
 #' @seealso \link{createDataFrame}, \link{read.json}, \link{table}
 #' @seealso \url{https://spark.apache.org/docs/latest/sparkr.html#sparkr-dataframes}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -77,7 +76,6 @@ setWriteMode <- function(write, mode) {
   write
 }
 
-#' @export
 #' @param sdf A Java object reference to the backing Scala DataFrame
 #' @param isCached TRUE if the SparkDataFrame is cached
 #' @noRd
@@ -97,7 +95,6 @@ dataFrame <- function(sdf, isCached = FALSE) {
 #' @rdname printSchema
 #' @name printSchema
 #' @aliases printSchema,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -123,7 +120,6 @@ setMethod("printSchema",
 #' @rdname schema
 #' @name schema
 #' @aliases schema,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -146,7 +142,6 @@ setMethod("schema",
 #' @aliases explain,SparkDataFrame-method
 #' @rdname explain
 #' @name explain
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -178,7 +173,6 @@ setMethod("explain",
 #' @rdname isLocal
 #' @name isLocal
 #' @aliases isLocal,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -209,7 +203,6 @@ setMethod("isLocal",
 #' @aliases showDF,SparkDataFrame-method
 #' @rdname showDF
 #' @name showDF
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -241,7 +234,6 @@ setMethod("showDF",
 #' @rdname show
 #' @aliases show,SparkDataFrame-method
 #' @name show
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -269,7 +261,6 @@ setMethod("show", "SparkDataFrame",
 #' @rdname dtypes
 #' @name dtypes
 #' @aliases dtypes,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -296,7 +287,6 @@ setMethod("dtypes",
 #' @rdname columns
 #' @name columns
 #' @aliases columns,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -388,7 +378,6 @@ setMethod("colnames<-",
 #' @aliases coltypes,SparkDataFrame-method
 #' @name coltypes
 #' @family SparkDataFrame functions
-#' @export
 #' @examples
 #'\dontrun{
 #' irisDF <- createDataFrame(iris)
@@ -445,7 +434,6 @@ setMethod("coltypes",
 #' @rdname coltypes
 #' @name coltypes<-
 #' @aliases coltypes<-,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -494,7 +482,6 @@ setMethod("coltypes<-",
 #' @rdname createOrReplaceTempView
 #' @name createOrReplaceTempView
 #' @aliases createOrReplaceTempView,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -521,7 +508,6 @@ setMethod("createOrReplaceTempView",
 #' @rdname registerTempTable-deprecated
 #' @name registerTempTable
 #' @aliases registerTempTable,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -552,7 +538,6 @@ setMethod("registerTempTable",
 #' @rdname insertInto
 #' @name insertInto
 #' @aliases insertInto,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -580,7 +565,6 @@ setMethod("insertInto",
 #' @aliases cache,SparkDataFrame-method
 #' @rdname cache
 #' @name cache
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -611,7 +595,6 @@ setMethod("cache",
 #' @rdname persist
 #' @name persist
 #' @aliases persist,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -641,7 +624,6 @@ setMethod("persist",
 #' @rdname unpersist
 #' @aliases unpersist,SparkDataFrame-method
 #' @name unpersist
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -669,7 +651,6 @@ setMethod("unpersist",
 #' @rdname storageLevel
 #' @aliases storageLevel,SparkDataFrame-method
 #' @name storageLevel
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -707,7 +688,6 @@ setMethod("storageLevel",
 #' @name coalesce
 #' @aliases coalesce,SparkDataFrame-method
 #' @seealso \link{repartition}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -744,7 +724,6 @@ setMethod("coalesce",
 #' @name repartition
 #' @aliases repartition,SparkDataFrame-method
 #' @seealso \link{coalesce}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -793,7 +772,6 @@ setMethod("repartition",
 #' @rdname toJSON
 #' @name toJSON
 #' @aliases toJSON,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -826,7 +804,6 @@ setMethod("toJSON",
 #' @rdname write.json
 #' @name write.json
 #' @aliases write.json,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -858,7 +835,6 @@ setMethod("write.json",
 #' @aliases write.orc,SparkDataFrame,character-method
 #' @rdname write.orc
 #' @name write.orc
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -890,7 +866,6 @@ setMethod("write.orc",
 #' @rdname write.parquet
 #' @name write.parquet
 #' @aliases write.parquet,SparkDataFrame,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -911,7 +886,6 @@ setMethod("write.parquet",
 #' @rdname write.parquet
 #' @name saveAsParquetFile
 #' @aliases saveAsParquetFile,SparkDataFrame,character-method
-#' @export
 #' @note saveAsParquetFile since 1.4.0
 setMethod("saveAsParquetFile",
           signature(x = "SparkDataFrame", path = "character"),
@@ -936,7 +910,6 @@ setMethod("saveAsParquetFile",
 #' @aliases write.text,SparkDataFrame,character-method
 #' @rdname write.text
 #' @name write.text
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -963,7 +936,6 @@ setMethod("write.text",
 #' @aliases distinct,SparkDataFrame-method
 #' @rdname distinct
 #' @name distinct
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1004,7 +976,6 @@ setMethod("unique",
 #' @aliases sample,SparkDataFrame-method
 #' @rdname sample
 #' @name sample
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1061,7 +1032,6 @@ setMethod("sample_frac",
 #' @rdname nrow
 #' @name nrow
 #' @aliases count,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1094,7 +1064,6 @@ setMethod("nrow",
 #' @rdname ncol
 #' @name ncol
 #' @aliases ncol,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1118,7 +1087,6 @@ setMethod("ncol",
 #' @rdname dim
 #' @aliases dim,SparkDataFrame-method
 #' @name dim
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1144,7 +1112,6 @@ setMethod("dim",
 #' @rdname collect
 #' @aliases collect,SparkDataFrame-method
 #' @name collect
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1229,7 +1196,6 @@ setMethod("collect",
 #' @rdname limit
 #' @name limit
 #' @aliases limit,SparkDataFrame,numeric-method
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -1253,7 +1219,6 @@ setMethod("limit",
 #' @rdname take
 #' @name take
 #' @aliases take,SparkDataFrame,numeric-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1282,7 +1247,6 @@ setMethod("take",
 #' @aliases head,SparkDataFrame-method
 #' @rdname head
 #' @name head
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1307,7 +1271,6 @@ setMethod("head",
 #' @aliases first,SparkDataFrame-method
 #' @rdname first
 #' @name first
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -1359,7 +1322,6 @@ setMethod("toRDD",
 #' @aliases groupBy,SparkDataFrame-method
 #' @rdname groupBy
 #' @name groupBy
-#' @export
 #' @examples
 #' \dontrun{
 #'   # Compute the average for all numeric columns grouped by department.
@@ -1401,7 +1363,6 @@ setMethod("group_by",
 #' @aliases agg,SparkDataFrame-method
 #' @rdname summarize
 #' @name agg
-#' @export
 #' @note agg since 1.4.0
 setMethod("agg",
           signature(x = "SparkDataFrame"),
@@ -1460,7 +1421,6 @@ setClassUnion("characterOrstructType", c("character", "structType"))
 #' @aliases dapply,SparkDataFrame,function,characterOrstructType-method
 #' @name dapply
 #' @seealso \link{dapplyCollect}
-#' @export
 #' @examples
 #' \dontrun{
 #'   df <- createDataFrame(iris)
@@ -1519,7 +1479,6 @@ setMethod("dapply",
 #' @aliases dapplyCollect,SparkDataFrame,function-method
 #' @name dapplyCollect
 #' @seealso \link{dapply}
-#' @export
 #' @examples
 #' \dontrun{
 #'   df <- createDataFrame(iris)
@@ -1576,7 +1535,6 @@ setMethod("dapplyCollect",
 #' @rdname gapply
 #' @name gapply
 #' @seealso \link{gapplyCollect}
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1673,7 +1631,6 @@ setMethod("gapply",
 #' @rdname gapplyCollect
 #' @name gapplyCollect
 #' @seealso \link{gapply}
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1947,7 +1904,6 @@ setMethod("[", signature(x = "SparkDataFrame"),
 #' @param ... currently not used.
 #' @return A new SparkDataFrame containing only the rows that meet the condition with selected
 #'         columns.
-#' @export
 #' @family SparkDataFrame functions
 #' @aliases subset,SparkDataFrame-method
 #' @seealso \link{withColumn}
@@ -1992,7 +1948,6 @@ setMethod("subset", signature(x = "SparkDataFrame"),
 #'            If more than one column is assigned in \code{col}, \code{...}
 #'            should be left empty.
 #' @return A new SparkDataFrame with selected columns.
-#' @export
 #' @family SparkDataFrame functions
 #' @rdname select
 #' @aliases select,SparkDataFrame,character-method
@@ -2024,7 +1979,6 @@ setMethod("select", signature(x = "SparkDataFrame", col = "character"),
           })
 
 #' @rdname select
-#' @export
 #' @aliases select,SparkDataFrame,Column-method
 #' @note select(SparkDataFrame, Column) since 1.4.0
 setMethod("select", signature(x = "SparkDataFrame", col = "Column"),
@@ -2037,7 +1991,6 @@ setMethod("select", signature(x = "SparkDataFrame", col = "Column"),
           })
 
 #' @rdname select
-#' @export
 #' @aliases select,SparkDataFrame,list-method
 #' @note select(SparkDataFrame, list) since 1.4.0
 setMethod("select",
@@ -2066,7 +2019,6 @@ setMethod("select",
 #' @aliases selectExpr,SparkDataFrame,character-method
 #' @rdname selectExpr
 #' @name selectExpr
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2098,7 +2050,6 @@ setMethod("selectExpr",
 #' @rdname withColumn
 #' @name withColumn
 #' @seealso \link{rename} \link{mutate} \link{subset}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2137,7 +2088,6 @@ setMethod("withColumn",
 #' @rdname mutate
 #' @name mutate
 #' @seealso \link{rename} \link{withColumn}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2208,7 +2158,6 @@ setMethod("mutate",
           })
 
 #' @param _data a SparkDataFrame.
-#' @export
 #' @rdname mutate
 #' @aliases transform,SparkDataFrame-method
 #' @name transform
@@ -2232,7 +2181,6 @@ setMethod("transform",
 #' @name withColumnRenamed
 #' @aliases withColumnRenamed,SparkDataFrame,character,character-method
 #' @seealso \link{mutate}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2258,7 +2206,6 @@ setMethod("withColumnRenamed",
 #' @rdname rename
 #' @name rename
 #' @aliases rename,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2304,7 +2251,6 @@ setClassUnion("characterOrColumn", c("character", "Column"))
 #' @aliases arrange,SparkDataFrame,Column-method
 #' @rdname arrange
 #' @name arrange
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2335,7 +2281,6 @@ setMethod("arrange",
 #' @rdname arrange
 #' @name arrange
 #' @aliases arrange,SparkDataFrame,character-method
-#' @export
 #' @note arrange(SparkDataFrame, character) since 1.4.0
 setMethod("arrange",
           signature(x = "SparkDataFrame", col = "character"),
@@ -2368,7 +2313,6 @@ setMethod("arrange",
 
 #' @rdname arrange
 #' @aliases orderBy,SparkDataFrame,characterOrColumn-method
-#' @export
 #' @note orderBy(SparkDataFrame, characterOrColumn) since 1.4.0
 setMethod("orderBy",
           signature(x = "SparkDataFrame", col = "characterOrColumn"),
@@ -2389,7 +2333,6 @@ setMethod("orderBy",
 #' @rdname filter
 #' @name filter
 #' @family subsetting functions
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2432,7 +2375,6 @@ setMethod("where",
 #' @aliases dropDuplicates,SparkDataFrame-method
 #' @rdname dropDuplicates
 #' @name dropDuplicates
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2481,7 +2423,6 @@ setMethod("dropDuplicates",
 #' @rdname join
 #' @name join
 #' @seealso \link{merge} \link{crossJoin}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2533,7 +2474,6 @@ setMethod("join",
 #' @rdname crossJoin
 #' @name crossJoin
 #' @seealso \link{merge} \link{join}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2581,7 +2521,6 @@ setMethod("crossJoin",
 #' @aliases merge,SparkDataFrame,SparkDataFrame-method
 #' @rdname merge
 #' @seealso \link{join} \link{crossJoin}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2721,7 +2660,6 @@ genAliasesForIntersectedCols <- function(x, intersectedColNames, suffix) {
 #' @name union
 #' @aliases union,SparkDataFrame,SparkDataFrame-method
 #' @seealso \link{rbind} \link{unionByName}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2742,7 +2680,6 @@ setMethod("union",
 #' @rdname union
 #' @name unionAll
 #' @aliases unionAll,SparkDataFrame,SparkDataFrame-method
-#' @export
 #' @note unionAll since 1.4.0
 setMethod("unionAll",
           signature(x = "SparkDataFrame", y = "SparkDataFrame"),
@@ -2769,7 +2706,6 @@ setMethod("unionAll",
 #' @name unionByName
 #' @aliases unionByName,SparkDataFrame,SparkDataFrame-method
 #' @seealso \link{rbind} \link{union}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2802,7 +2738,6 @@ setMethod("unionByName",
 #' @rdname rbind
 #' @name rbind
 #' @seealso \link{union} \link{unionByName}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2835,7 +2770,6 @@ setMethod("rbind",
 #' @aliases intersect,SparkDataFrame,SparkDataFrame-method
 #' @rdname intersect
 #' @name intersect
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2863,7 +2797,6 @@ setMethod("intersect",
 #' @aliases except,SparkDataFrame,SparkDataFrame-method
 #' @rdname except
 #' @name except
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2872,7 +2805,6 @@ setMethod("intersect",
 #' exceptDF <- except(df, df2)
 #' }
 #' @rdname except
-#' @export
 #' @note except since 1.4.0
 setMethod("except",
           signature(x = "SparkDataFrame", y = "SparkDataFrame"),
@@ -2909,7 +2841,6 @@ setMethod("except",
 #' @aliases write.df,SparkDataFrame-method
 #' @rdname write.df
 #' @name write.df
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -2944,7 +2875,6 @@ setMethod("write.df",
 #' @rdname write.df
 #' @name saveDF
 #' @aliases saveDF,SparkDataFrame,character-method
-#' @export
 #' @note saveDF since 1.4.0
 setMethod("saveDF",
           signature(df = "SparkDataFrame", path = "character"),
@@ -2978,7 +2908,6 @@ setMethod("saveDF",
 #' @aliases saveAsTable,SparkDataFrame,character-method
 #' @rdname saveAsTable
 #' @name saveAsTable
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3015,7 +2944,6 @@ setMethod("saveAsTable",
 #' @aliases describe,SparkDataFrame,character-method describe,SparkDataFrame,ANY-method
 #' @rdname describe
 #' @name describe
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3071,7 +2999,6 @@ setMethod("describe",
 #' @rdname summary
 #' @name summary
 #' @aliases summary,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3117,7 +3044,6 @@ setMethod("summary",
 #' @rdname nafunctions
 #' @aliases dropna,SparkDataFrame-method
 #' @name dropna
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3148,7 +3074,6 @@ setMethod("dropna",
 #' @rdname nafunctions
 #' @name na.omit
 #' @aliases na.omit,SparkDataFrame-method
-#' @export
 #' @note na.omit since 1.5.0
 setMethod("na.omit",
           signature(object = "SparkDataFrame"),
@@ -3168,7 +3093,6 @@ setMethod("na.omit",
 #' @rdname nafunctions
 #' @name fillna
 #' @aliases fillna,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3399,7 +3323,6 @@ setMethod("str",
 #' @rdname drop
 #' @name drop
 #' @aliases drop,SparkDataFrame-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3427,7 +3350,6 @@ setMethod("drop",
 #' @name drop
 #' @rdname drop
 #' @aliases drop,ANY-method
-#' @export
 setMethod("drop",
           signature(x = "ANY"),
           function(x) {
@@ -3446,7 +3368,6 @@ setMethod("drop",
 #' @rdname histogram
 #' @aliases histogram,SparkDataFrame,characterOrColumn-method
 #' @family SparkDataFrame functions
-#' @export
 #' @examples
 #' \dontrun{
 #'
@@ -3582,7 +3503,6 @@ setMethod("histogram",
 #' @rdname write.jdbc
 #' @name write.jdbc
 #' @aliases write.jdbc,SparkDataFrame,character,character-method
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3611,7 +3531,6 @@ setMethod("write.jdbc",
 #' @aliases randomSplit,SparkDataFrame,numeric-method
 #' @rdname randomSplit
 #' @name randomSplit
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3645,7 +3564,6 @@ setMethod("randomSplit",
 #' @aliases getNumPartitions,SparkDataFrame-method
 #' @rdname getNumPartitions
 #' @name getNumPartitions
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3672,7 +3590,6 @@ setMethod("getNumPartitions",
 #' @rdname isStreaming
 #' @name isStreaming
 #' @seealso \link{read.stream} \link{write.stream}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3726,7 +3643,6 @@ setMethod("isStreaming",
 #' @aliases write.stream,SparkDataFrame-method
 #' @rdname write.stream
 #' @name write.stream
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -3819,7 +3735,6 @@ setMethod("write.stream",
 #' @rdname checkpoint
 #' @name checkpoint
 #' @seealso \link{setCheckpointDir}
-#' @export
 #' @examples
 #'\dontrun{
 #' setCheckpointDir("/checkpoint")
@@ -3847,7 +3762,6 @@ setMethod("checkpoint",
 #' @aliases localCheckpoint,SparkDataFrame-method
 #' @rdname localCheckpoint
 #' @name localCheckpoint
-#' @export
 #' @examples
 #'\dontrun{
 #' df <- localCheckpoint(df)
@@ -3874,7 +3788,6 @@ setMethod("localCheckpoint",
 #' @aliases cube,SparkDataFrame-method
 #' @rdname cube
 #' @name cube
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(mtcars)
@@ -3909,7 +3822,6 @@ setMethod("cube",
 #' @aliases rollup,SparkDataFrame-method
 #' @rdname rollup
 #' @name rollup
-#' @export
 #' @examples
 #'\dontrun{
 #' df <- createDataFrame(mtcars)
@@ -3942,7 +3854,6 @@ setMethod("rollup",
 #' @aliases hint,SparkDataFrame,character-method
 #' @rdname hint
 #' @name hint
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(mtcars)
@@ -3966,7 +3877,6 @@ setMethod("hint",
 #' @family SparkDataFrame functions
 #' @rdname alias
 #' @name alias
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- alias(createDataFrame(mtcars), "mtcars")
@@ -3997,7 +3907,6 @@ setMethod("alias",
 #' @family SparkDataFrame functions
 #' @rdname broadcast
 #' @name broadcast
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(mtcars)
@@ -4041,7 +3950,6 @@ setMethod("broadcast",
 #' @family SparkDataFrame functions
 #' @rdname withWatermark
 #' @name withWatermark
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()

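For context on the SparkDataFrame methods documented in the hunks above (withColumn, repartition, cache, dim, arrange), here is a minimal usage sketch; the faithful data set and the derived column name are illustrative only and the snippet is not part of this patch.

  # Sketch only: exercise a few SparkDataFrame methods from DataFrame.R.
  library(SparkR)
  sparkR.session()
  df <- createDataFrame(faithful)                        # R's built-in faithful data set
  df <- withColumn(df, "waiting_hours", df$waiting / 60) # add a derived column
  df <- repartition(df, 4L)                              # repartition into 4 partitions
  cache(df)                                              # mark for in-memory caching
  dim(df)                                                # c(number of rows, number of columns)
  head(arrange(df, desc(df$eruptions)))                  # sort descending and preview
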
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/SQLContext.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 9d0a2d5..ebec0ce 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -123,7 +123,6 @@ infer_type <- function(x) {
 #' @return a list of config values with keys as their names
 #' @rdname sparkR.conf
 #' @name sparkR.conf
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -163,7 +162,6 @@ sparkR.conf <- function(key, defaultValue) {
 #' @return a character string of the Spark version
 #' @rdname sparkR.version
 #' @name sparkR.version
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -191,7 +189,6 @@ getDefaultSqlSource <- function() {
 #'        limited by length of the list or number of rows of the data.frame
 #' @return A SparkDataFrame.
 #' @rdname createDataFrame
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -294,7 +291,6 @@ createDataFrame <- function(x, ...) {
 
 #' @rdname createDataFrame
 #' @aliases createDataFrame
-#' @export
 #' @method as.DataFrame default
 #' @note as.DataFrame since 1.6.0
 as.DataFrame.default <- function(data, schema = NULL, samplingRatio = 1.0, numPartitions = NULL) {
@@ -304,7 +300,6 @@ as.DataFrame.default <- function(data, schema = NULL, samplingRatio = 1.0, numPa
 #' @param ... additional argument(s).
 #' @rdname createDataFrame
 #' @aliases as.DataFrame
-#' @export
 as.DataFrame <- function(data, ...) {
   dispatchFunc("as.DataFrame(data, schema = NULL)", data, ...)
 }
@@ -342,7 +337,6 @@ setMethod("toDF", signature(x = "RDD"),
 #' @param ... additional external data source specific named properties.
 #' @return SparkDataFrame
 #' @rdname read.json
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -371,7 +365,6 @@ read.json <- function(x, ...) {
 
 #' @rdname read.json
 #' @name jsonFile
-#' @export
 #' @method jsonFile default
 #' @note jsonFile since 1.4.0
 jsonFile.default <- function(path) {
@@ -423,7 +416,6 @@ jsonRDD <- function(sqlContext, rdd, schema = NULL, samplingRatio = 1.0) {
 #' @param ... additional external data source specific named properties.
 #' @return SparkDataFrame
 #' @rdname read.orc
-#' @export
 #' @name read.orc
 #' @note read.orc since 2.0.0
 read.orc <- function(path, ...) {
@@ -444,7 +436,6 @@ read.orc <- function(path, ...) {
 #' @param path path of file to read. A vector of multiple paths is allowed.
 #' @return SparkDataFrame
 #' @rdname read.parquet
-#' @export
 #' @name read.parquet
 #' @method read.parquet default
 #' @note read.parquet since 1.6.0
@@ -466,7 +457,6 @@ read.parquet <- function(x, ...) {
 #' @param ... argument(s) passed to the method.
 #' @rdname read.parquet
 #' @name parquetFile
-#' @export
 #' @method parquetFile default
 #' @note parquetFile since 1.4.0
 parquetFile.default <- function(...) {
@@ -490,7 +480,6 @@ parquetFile <- function(x, ...) {
 #' @param ... additional external data source specific named properties.
 #' @return SparkDataFrame
 #' @rdname read.text
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -522,7 +511,6 @@ read.text <- function(x, ...) {
 #' @param sqlQuery A character vector containing the SQL query
 #' @return SparkDataFrame
 #' @rdname sql
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -556,7 +544,6 @@ sql <- function(x, ...) {
 #' @return SparkDataFrame
 #' @rdname tableToDF
 #' @name tableToDF
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -591,7 +578,6 @@ tableToDF <- function(tableName) {
 #' @rdname read.df
 #' @name read.df
 #' @seealso \link{read.json}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -681,7 +667,6 @@ loadDF <- function(x = NULL, ...) {
 #' @return SparkDataFrame
 #' @rdname read.jdbc
 #' @name read.jdbc
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -734,7 +719,6 @@ read.jdbc <- function(url, tableName,
 #' @rdname read.stream
 #' @name read.stream
 #' @seealso \link{write.stream}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()

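A short, illustrative round trip through the read/write and SQL entry points documented above (write.parquet itself is defined in DataFrame.R); the output path and view name are placeholders, not part of the patch.

  # Sketch only: write a SparkDataFrame out and read it back via the generic reader.
  df <- createDataFrame(faithful)
  write.parquet(df, "/tmp/faithful_parquet")             # placeholder path
  df2 <- read.df("/tmp/faithful_parquet", source = "parquet")
  createOrReplaceTempView(df2, "faithful_tbl")           # placeholder view name
  head(sql("SELECT waiting FROM faithful_tbl WHERE eruptions > 3"))
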
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/WindowSpec.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/WindowSpec.R b/R/pkg/R/WindowSpec.R
index debc7cb..ee7f4ad 100644
--- a/R/pkg/R/WindowSpec.R
+++ b/R/pkg/R/WindowSpec.R
@@ -28,7 +28,6 @@ NULL
 #' @seealso \link{windowPartitionBy}, \link{windowOrderBy}
 #'
 #' @param sws A Java object reference to the backing Scala WindowSpec
-#' @export
 #' @note WindowSpec since 2.0.0
 setClass("WindowSpec",
          slots = list(sws = "jobj"))
@@ -44,7 +43,6 @@ windowSpec <- function(sws) {
 }
 
 #' @rdname show
-#' @export
 #' @note show(WindowSpec) since 2.0.0
 setMethod("show", "WindowSpec",
           function(object) {
@@ -63,7 +61,6 @@ setMethod("show", "WindowSpec",
 #' @name partitionBy
 #' @aliases partitionBy,WindowSpec-method
 #' @family windowspec_method
-#' @export
 #' @examples
 #' \dontrun{
 #'   partitionBy(ws, "col1", "col2")
@@ -97,7 +94,6 @@ setMethod("partitionBy",
 #' @aliases orderBy,WindowSpec,character-method
 #' @family windowspec_method
 #' @seealso See \link{arrange} for use in sorting a SparkDataFrame
-#' @export
 #' @examples
 #' \dontrun{
 #'   orderBy(ws, "col1", "col2")
@@ -113,7 +109,6 @@ setMethod("orderBy",
 #' @rdname orderBy
 #' @name orderBy
 #' @aliases orderBy,WindowSpec,Column-method
-#' @export
 #' @note orderBy(WindowSpec, Column) since 2.0.0
 setMethod("orderBy",
           signature(x = "WindowSpec", col = "Column"),
@@ -142,7 +137,6 @@ setMethod("orderBy",
 #' @aliases rowsBetween,WindowSpec,numeric,numeric-method
 #' @name rowsBetween
 #' @family windowspec_method
-#' @export
 #' @examples
 #' \dontrun{
 #'   rowsBetween(ws, 0, 3)
@@ -174,7 +168,6 @@ setMethod("rowsBetween",
 #' @aliases rangeBetween,WindowSpec,numeric,numeric-method
 #' @name rangeBetween
 #' @family windowspec_method
-#' @export
 #' @examples
 #' \dontrun{
 #'   rangeBetween(ws, 0, 3)
@@ -202,7 +195,6 @@ setMethod("rangeBetween",
 #' @name over
 #' @aliases over,Column,WindowSpec-method
 #' @family colum_func
-#' @export
 #' @examples
 #' \dontrun{
 #'   df <- createDataFrame(mtcars)

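An illustrative window query combining the WindowSpec builders above with over(); mtcars, the grouping column, and the alias are examples only.

  # Sketch only: rank rows within each cylinder group by mpg.
  df <- createDataFrame(mtcars)
  ws <- orderBy(windowPartitionBy("cyl"), "mpg")
  ranked <- select(df, df$cyl, df$mpg,
                   alias(over(row_number(), ws), "rank_in_cyl"))
  head(ranked)
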
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/broadcast.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/broadcast.R b/R/pkg/R/broadcast.R
index 398dffc..282f8a6 100644
--- a/R/pkg/R/broadcast.R
+++ b/R/pkg/R/broadcast.R
@@ -32,14 +32,12 @@
 # @seealso broadcast
 #
 # @param id Id of the backing Spark broadcast variable
-# @export
 setClass("Broadcast", slots = list(id = "character"))
 
 # @rdname broadcast-class
 # @param value Value of the broadcast variable
 # @param jBroadcastRef reference to the backing Java broadcast object
 # @param objName name of broadcasted object
-# @export
 Broadcast <- function(id, value, jBroadcastRef, objName) {
   .broadcastValues[[id]] <- value
   .broadcastNames[[as.character(objName)]] <- jBroadcastRef
@@ -73,7 +71,6 @@ setMethod("value",
 
 # @param bcastId The id of broadcast variable to set
 # @param value The value to be set
-# @export
 setBroadcastValue <- function(bcastId, value) {
   bcastIdStr <- as.character(bcastId)
   .broadcastValues[[bcastIdStr]] <- value

http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/catalog.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/catalog.R b/R/pkg/R/catalog.R
index e59a702..baf4d86 100644
--- a/R/pkg/R/catalog.R
+++ b/R/pkg/R/catalog.R
@@ -34,7 +34,6 @@
 #' @return A SparkDataFrame.
 #' @rdname createExternalTable-deprecated
 #' @seealso \link{createTable}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -71,7 +70,6 @@ createExternalTable <- function(x, ...) {
 #' @return A SparkDataFrame.
 #' @rdname createTable
 #' @seealso \link{createExternalTable}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -110,7 +108,6 @@ createTable <- function(tableName, path = NULL, source = NULL, schema = NULL, ..
 #'                  identifier is provided, it refers to a table in the current database.
 #' @return SparkDataFrame
 #' @rdname cacheTable
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -140,7 +137,6 @@ cacheTable <- function(x, ...) {
 #'                  identifier is provided, it refers to a table in the current database.
 #' @return SparkDataFrame
 #' @rdname uncacheTable
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -167,7 +163,6 @@ uncacheTable <- function(x, ...) {
 #' Removes all cached tables from the in-memory cache.
 #'
 #' @rdname clearCache
-#' @export
 #' @examples
 #' \dontrun{
 #' clearCache()
@@ -193,7 +188,6 @@ clearCache <- function() {
 #' @param tableName The name of the SparkSQL table to be dropped.
 #' @seealso \link{dropTempView}
 #' @rdname dropTempTable-deprecated
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -225,7 +219,6 @@ dropTempTable <- function(x, ...) {
 #' @return TRUE if the view is dropped successfully, FALSE otherwise.
 #' @rdname dropTempView
 #' @name dropTempView
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -251,7 +244,6 @@ dropTempView <- function(viewName) {
 #' @return a SparkDataFrame
 #' @rdname tables
 #' @seealso \link{listTables}
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -276,7 +268,6 @@ tables <- function(x, ...) {
 #' @param databaseName (optional) name of the database
 #' @return a list of table names
 #' @rdname tableNames
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -304,7 +295,6 @@ tableNames <- function(x, ...) {
 #' @return name of the current default database.
 #' @rdname currentDatabase
 #' @name currentDatabase
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -324,7 +314,6 @@ currentDatabase <- function() {
 #' @param databaseName name of the database
 #' @rdname setCurrentDatabase
 #' @name setCurrentDatabase
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -347,7 +336,6 @@ setCurrentDatabase <- function(databaseName) {
 #' @return a SparkDataFrame of the list of databases.
 #' @rdname listDatabases
 #' @name listDatabases
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -370,7 +358,6 @@ listDatabases <- function() {
 #' @rdname listTables
 #' @name listTables
 #' @seealso \link{tables}
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -403,7 +390,6 @@ listTables <- function(databaseName = NULL) {
 #' @return a SparkDataFrame of the list of column descriptions.
 #' @rdname listColumns
 #' @name listColumns
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -433,7 +419,6 @@ listColumns <- function(tableName, databaseName = NULL) {
 #' @return a SparkDataFrame of the list of function descriptions.
 #' @rdname listFunctions
 #' @name listFunctions
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -463,7 +448,6 @@ listFunctions <- function(databaseName = NULL) {
 #'                  identifier is provided, it refers to a table in the current database.
 #' @rdname recoverPartitions
 #' @name recoverPartitions
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -490,7 +474,6 @@ recoverPartitions <- function(tableName) {
 #'                  identifier is provided, it refers to a table in the current database.
 #' @rdname refreshTable
 #' @name refreshTable
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()
@@ -512,7 +495,6 @@ refreshTable <- function(tableName) {
 #' @param path the path of the data source.
 #' @rdname refreshByPath
 #' @name refreshByPath
-#' @export
 #' @examples
 #' \dontrun{
 #' sparkR.session()

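A small sketch of the catalog helpers documented above; the temporary view name is a placeholder and the snippet is not part of the patch.

  # Sketch only: register a temp view, inspect and cache it, then clean up.
  df <- createDataFrame(mtcars)
  createOrReplaceTempView(df, "mtcars_tbl")              # placeholder view name
  collect(listTables())                                  # includes mtcars_tbl
  cacheTable("mtcars_tbl")
  collect(listColumns("mtcars_tbl"))                     # column descriptions
  uncacheTable("mtcars_tbl")
  dropTempView("mtcars_tbl")
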
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/column.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 3095adb..9727efc 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -29,7 +29,6 @@ setOldClass("jobj")
 #' @rdname column
 #'
 #' @slot jc reference to JVM SparkDataFrame column
-#' @export
 #' @note Column since 1.4.0
 setClass("Column",
          slots = list(jc = "jobj"))
@@ -56,7 +55,6 @@ setMethod("column",
 #' @rdname show
 #' @name show
 #' @aliases show,Column-method
-#' @export
 #' @note show(Column) since 1.4.0
 setMethod("show", "Column",
           function(object) {
@@ -134,7 +132,6 @@ createMethods()
 #' @name alias
 #' @aliases alias,Column-method
 #' @family colum_func
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(iris)
@@ -270,7 +267,6 @@ setMethod("cast",
 #' @name %in%
 #' @aliases %in%,Column-method
 #' @return A matched values as a result of comparing with given values.
-#' @export
 #' @examples
 #' \dontrun{
 #' filter(df, "age in (10, 30)")
@@ -296,7 +292,6 @@ setMethod("%in%",
 #' @name otherwise
 #' @family colum_func
 #' @aliases otherwise,Column-method
-#' @export
 #' @note otherwise since 1.5.0
 setMethod("otherwise",
           signature(x = "Column", value = "ANY"),
@@ -318,7 +313,6 @@ setMethod("otherwise",
 #' @rdname eq_null_safe
 #' @name %<=>%
 #' @aliases %<=>%,Column-method
-#' @export
 #' @examples
 #' \dontrun{
 #' df1 <- createDataFrame(data.frame(
@@ -348,7 +342,6 @@ setMethod("%<=>%",
 #' @rdname not
 #' @name not
 #' @aliases !,Column-method
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(data.frame(x = c(-1, 0, 1)))

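A short sketch of the Column operators documented above (cast, %in%, when/otherwise, alias); when() itself lives in functions.R, and the data and labels are illustrative.

  # Sketch only: build derived columns with cast and when/otherwise, filter with %in%.
  df <- createDataFrame(faithful)
  labeled <- select(df,
                    alias(cast(df$waiting, "integer"), "waiting_int"),
                    alias(otherwise(when(df$eruptions > 3, "long"), "short"), "kind"))
  head(labeled)
  head(filter(df, df$waiting %in% c(70, 80)))
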
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/context.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/context.R b/R/pkg/R/context.R
index 443c2ff..8ec727d 100644
--- a/R/pkg/R/context.R
+++ b/R/pkg/R/context.R
@@ -308,7 +308,6 @@ setCheckpointDirSC <- function(sc, dirName) {
 #' @rdname spark.addFile
 #' @param path The path of the file to be added
 #' @param recursive Whether to add files recursively from the path. Default is FALSE.
-#' @export
 #' @examples
 #'\dontrun{
 #' spark.addFile("~/myfile")
@@ -323,7 +322,6 @@ spark.addFile <- function(path, recursive = FALSE) {
 #'
 #' @rdname spark.getSparkFilesRootDirectory
 #' @return the root directory that contains files added through spark.addFile
-#' @export
 #' @examples
 #'\dontrun{
 #' spark.getSparkFilesRootDirectory()
@@ -344,7 +342,6 @@ spark.getSparkFilesRootDirectory <- function() { # nolint
 #' @rdname spark.getSparkFiles
 #' @param fileName The name of the file added through spark.addFile
 #' @return the absolute path of a file added through spark.addFile.
-#' @export
 #' @examples
 #'\dontrun{
 #' spark.getSparkFiles("myfile")
@@ -391,7 +388,6 @@ spark.getSparkFiles <- function(fileName) {
 #' @param list the list of elements
 #' @param func a function that takes one argument.
 #' @return a list of results (the exact type being determined by the function)
-#' @export
 #' @examples
 #'\dontrun{
 #' sparkR.session()
@@ -412,7 +408,6 @@ spark.lapply <- function(list, func) {
 #'
 #' @rdname setLogLevel
 #' @param level New log level
-#' @export
 #' @examples
 #'\dontrun{
 #' setLogLevel("ERROR")
@@ -431,7 +426,6 @@ setLogLevel <- function(level) {
 #' @rdname setCheckpointDir
 #' @param directory Directory path to checkpoint to
 #' @seealso \link{checkpoint}
-#' @export
 #' @examples
 #'\dontrun{
 #' setCheckpointDir("/checkpoint")

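An illustrative use of the driver-side helpers documented above; the file and directory paths are placeholders only.

  # Sketch only: SparkContext helpers exposed through context.R.
  sparkR.session()
  setLogLevel("WARN")
  squares <- spark.lapply(1:5, function(i) i * i)        # distributed lapply, returns a list
  setCheckpointDir("/tmp/sparkr-checkpoints")            # placeholder directory
  spark.addFile("/tmp/lookup.csv")                       # placeholder file
  spark.getSparkFiles("lookup.csv")                      # absolute path of the added file
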
http://git-wip-us.apache.org/repos/asf/spark/blob/4586eada/R/pkg/R/functions.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 29ee146..a527426 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -244,7 +244,6 @@ NULL
 #' If the parameter is a Column, it is returned unchanged.
 #'
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @aliases lit lit,ANY-method
 #' @examples
 #'
@@ -267,7 +266,6 @@ setMethod("lit", signature("ANY"),
 #' \code{abs}: Computes the absolute value.
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases abs abs,Column-method
 #' @note abs since 1.5.0
 setMethod("abs",
@@ -282,7 +280,6 @@ setMethod("abs",
 #' as if computed by \code{java.lang.Math.acos()}
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases acos acos,Column-method
 #' @note acos since 1.5.0
 setMethod("acos",
@@ -296,7 +293,6 @@ setMethod("acos",
 #' \code{approxCountDistinct}: Returns the approximate number of distinct items in a group.
 #'
 #' @rdname column_aggregate_functions
-#' @export
 #' @aliases approxCountDistinct approxCountDistinct,Column-method
 #' @examples
 #'
@@ -319,7 +315,6 @@ setMethod("approxCountDistinct",
 #' and returns the result as an int column.
 #'
 #' @rdname column_string_functions
-#' @export
 #' @aliases ascii ascii,Column-method
 #' @examples
 #'
@@ -338,7 +333,6 @@ setMethod("ascii",
 #' as if computed by \code{java.lang.Math.asin()}
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases asin asin,Column-method
 #' @note asin since 1.5.0
 setMethod("asin",
@@ -353,7 +347,6 @@ setMethod("asin",
 #' as if computed by \code{java.lang.Math.atan()}
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases atan atan,Column-method
 #' @note atan since 1.5.0
 setMethod("atan",
@@ -370,7 +363,6 @@ setMethod("atan",
 #' @rdname avg
 #' @name avg
 #' @family aggregate functions
-#' @export
 #' @aliases avg,Column-method
 #' @examples \dontrun{avg(df$c)}
 #' @note avg since 1.4.0
@@ -386,7 +378,6 @@ setMethod("avg",
 #' a string column. This is the reverse of unbase64.
 #'
 #' @rdname column_string_functions
-#' @export
 #' @aliases base64 base64,Column-method
 #' @examples
 #'
@@ -410,7 +401,6 @@ setMethod("base64",
 #' of the given long column. For example, bin("12") returns "1100".
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases bin bin,Column-method
 #' @note bin since 1.5.0
 setMethod("bin",
@@ -424,7 +414,6 @@ setMethod("bin",
 #' \code{bitwiseNOT}: Computes bitwise NOT.
 #'
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @aliases bitwiseNOT bitwiseNOT,Column-method
 #' @examples
 #'
@@ -442,7 +431,6 @@ setMethod("bitwiseNOT",
 #' \code{cbrt}: Computes the cube-root of the given value.
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases cbrt cbrt,Column-method
 #' @note cbrt since 1.4.0
 setMethod("cbrt",
@@ -456,7 +444,6 @@ setMethod("cbrt",
 #' \code{ceil}: Computes the ceiling of the given value.
 #'
 #' @rdname column_math_functions
-#' @export
 #' @aliases ceil ceil,Column-method
 #' @note ceil since 1.5.0
 setMethod("ceil",
@@ -471,7 +458,6 @@ setMethod("ceil",
 #'
 #' @rdname column_math_functions
 #' @aliases ceiling ceiling,Column-method
-#' @export
 #' @note ceiling since 1.5.0
 setMethod("ceiling",
           signature(x = "Column"),
@@ -483,7 +469,6 @@ setMethod("ceiling",
 #' \code{coalesce}: Returns the first column that is not NA, or NA if all inputs are.
 #'
 #' @rdname column_nonaggregate_functions
-#' @export
 #' @aliases coalesce,Column-method
 #' @note coalesce(Column) since 2.1.1
 setMethod("coalesce",
@@ -514,7 +499,6 @@ col <- function(x) {
 #' @rdname column
 #' @name column
 #' @family non-aggregate functions
-#' @export
 #' @aliases column,character-method
 #' @examples \dontrun{column("name")}
 #' @note column since 1.6.0
@@ -533,7 +517,6 @@ setMethod("column",
 #' @rdname corr
 #' @name corr
 #' @family aggregate functions
-#' @export
 #' @aliases corr,Column-method
 #' @examples
 #' \dontrun{
@@ -557,7 +540,6 @@ setMethod("corr", signature(x = "Column"),
 #' @rdname cov
 #' @name cov
 #' @family aggregate functions
-#' @export
 #' @aliases cov,characterOrColumn-method
 #' @examples
 #' \dontrun{
@@ -598,7 +580,6 @@ setMethod("covar_samp", signature(col1 = "characterOrColumn", col2 = "characterO
 #'
 #' @rdname cov
 #' @name covar_pop
-#' @export
 #' @aliases covar_pop,characterOrColumn,characterOrColumn-method
 #' @note covar_pop since 2.0.0
 setMethod("covar_pop", signature(col1 = "characterOrColumn", col2 = "characterOrColumn"),
@@ -618,7 +599,6 @@ setMethod("covar_pop", signature(col1 = "characterOrColumn", col2 = "characterOr
 #'
 #' @rdname column_math_functions
 #' @aliases cos cos,Column-method
-#' @export
 #' @note cos since 1.5.0
 setMethod("cos",
           signature(x = "Column"),
@@ -633,7 +613,6 @@ setMethod("cos",
 #'
 #' @rdname column_math_functions
 #' @aliases cosh cosh,Column-method
-#' @export
 #' @note cosh since 1.5.0
 setMethod("cosh",
           signature(x = "Column"),
@@ -651,7 +630,6 @@ setMethod("cosh",
 #' @name count
 #' @family aggregate functions
 #' @aliases count,Column-method
-#' @export
 #' @examples \dontrun{count(df$c)}
 #' @note count since 1.4.0
 setMethod("count",
@@ -667,7 +645,6 @@ setMethod("count",
 #'
 #' @rdname column_misc_functions
 #' @aliases crc32 crc32,Column-method
-#' @export
 #' @note crc32 since 1.5.0
 setMethod("crc32",
           signature(x = "Column"),
@@ -682,7 +659,6 @@ setMethod("crc32",
 #'
 #' @rdname column_misc_functions
 #' @aliases hash hash,Column-method
-#' @export
 #' @note hash since 2.0.0
 setMethod("hash",
           signature(x = "Column"),
@@ -701,7 +677,6 @@ setMethod("hash",
 #'
 #' @rdname column_datetime_functions
 #' @aliases dayofmonth dayofmonth,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -723,7 +698,6 @@ setMethod("dayofmonth",
 #'
 #' @rdname column_datetime_functions
 #' @aliases dayofweek dayofweek,Column-method
-#' @export
 #' @note dayofweek since 2.3.0
 setMethod("dayofweek",
           signature(x = "Column"),
@@ -738,7 +712,6 @@ setMethod("dayofweek",
 #'
 #' @rdname column_datetime_functions
 #' @aliases dayofyear dayofyear,Column-method
-#' @export
 #' @note dayofyear since 1.5.0
 setMethod("dayofyear",
           signature(x = "Column"),
@@ -756,7 +729,6 @@ setMethod("dayofyear",
 #'
 #' @rdname column_string_functions
 #' @aliases decode decode,Column,character-method
-#' @export
 #' @note decode since 1.6.0
 setMethod("decode",
           signature(x = "Column", charset = "character"),
@@ -771,7 +743,6 @@ setMethod("decode",
 #'
 #' @rdname column_string_functions
 #' @aliases encode encode,Column,character-method
-#' @export
 #' @note encode since 1.6.0
 setMethod("encode",
           signature(x = "Column", charset = "character"),
@@ -785,7 +756,6 @@ setMethod("encode",
 #'
 #' @rdname column_math_functions
 #' @aliases exp exp,Column-method
-#' @export
 #' @note exp since 1.5.0
 setMethod("exp",
           signature(x = "Column"),
@@ -799,7 +769,6 @@ setMethod("exp",
 #'
 #' @rdname column_math_functions
 #' @aliases expm1 expm1,Column-method
-#' @export
 #' @note expm1 since 1.5.0
 setMethod("expm1",
           signature(x = "Column"),
@@ -813,7 +782,6 @@ setMethod("expm1",
 #'
 #' @rdname column_math_functions
 #' @aliases factorial factorial,Column-method
-#' @export
 #' @note factorial since 1.5.0
 setMethod("factorial",
           signature(x = "Column"),
@@ -836,7 +804,6 @@ setMethod("factorial",
 #' @name first
 #' @aliases first,characterOrColumn-method
 #' @family aggregate functions
-#' @export
 #' @examples
 #' \dontrun{
 #' first(df$c)
@@ -860,7 +827,6 @@ setMethod("first",
 #'
 #' @rdname column_math_functions
 #' @aliases floor floor,Column-method
-#' @export
 #' @note floor since 1.5.0
 setMethod("floor",
           signature(x = "Column"),
@@ -874,7 +840,6 @@ setMethod("floor",
 #'
 #' @rdname column_math_functions
 #' @aliases hex hex,Column-method
-#' @export
 #' @note hex since 1.5.0
 setMethod("hex",
           signature(x = "Column"),
@@ -888,7 +853,6 @@ setMethod("hex",
 #'
 #' @rdname column_datetime_functions
 #' @aliases hour hour,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -911,7 +875,6 @@ setMethod("hour",
 #'
 #' @rdname column_string_functions
 #' @aliases initcap initcap,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -946,7 +909,6 @@ setMethod("isnan",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases is.nan is.nan,Column-method
-#' @export
 #' @note is.nan since 2.0.0
 setMethod("is.nan",
           signature(x = "Column"),
@@ -959,7 +921,6 @@ setMethod("is.nan",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases kurtosis kurtosis,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -988,7 +949,6 @@ setMethod("kurtosis",
 #' @name last
 #' @aliases last,characterOrColumn-method
 #' @family aggregate functions
-#' @export
 #' @examples
 #' \dontrun{
 #' last(df$c)
@@ -1014,7 +974,6 @@ setMethod("last",
 #'
 #' @rdname column_datetime_functions
 #' @aliases last_day last_day,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1034,7 +993,6 @@ setMethod("last_day",
 #'
 #' @rdname column_string_functions
 #' @aliases length length,Column-method
-#' @export
 #' @note length since 1.5.0
 setMethod("length",
           signature(x = "Column"),
@@ -1048,7 +1006,6 @@ setMethod("length",
 #'
 #' @rdname column_math_functions
 #' @aliases log log,Column-method
-#' @export
 #' @note log since 1.5.0
 setMethod("log",
           signature(x = "Column"),
@@ -1062,7 +1019,6 @@ setMethod("log",
 #'
 #' @rdname column_math_functions
 #' @aliases log10 log10,Column-method
-#' @export
 #' @note log10 since 1.5.0
 setMethod("log10",
           signature(x = "Column"),
@@ -1076,7 +1032,6 @@ setMethod("log10",
 #'
 #' @rdname column_math_functions
 #' @aliases log1p log1p,Column-method
-#' @export
 #' @note log1p since 1.5.0
 setMethod("log1p",
           signature(x = "Column"),
@@ -1090,7 +1045,6 @@ setMethod("log1p",
 #'
 #' @rdname column_math_functions
 #' @aliases log2 log2,Column-method
-#' @export
 #' @note log2 since 1.5.0
 setMethod("log2",
           signature(x = "Column"),
@@ -1104,7 +1058,6 @@ setMethod("log2",
 #'
 #' @rdname column_string_functions
 #' @aliases lower lower,Column-method
-#' @export
 #' @note lower since 1.4.0
 setMethod("lower",
           signature(x = "Column"),
@@ -1119,7 +1072,6 @@ setMethod("lower",
 #'
 #' @rdname column_string_functions
 #' @aliases ltrim ltrim,Column,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1143,7 +1095,6 @@ setMethod("ltrim",
 #' @param trimString a character string to trim with
 #' @rdname column_string_functions
 #' @aliases ltrim,Column,character-method
-#' @export
 #' @note ltrim(Column, character) since 2.3.0
 setMethod("ltrim",
           signature(x = "Column", trimString = "character"),
@@ -1171,7 +1122,6 @@ setMethod("max",
 #'
 #' @rdname column_misc_functions
 #' @aliases md5 md5,Column-method
-#' @export
 #' @note md5 since 1.5.0
 setMethod("md5",
           signature(x = "Column"),
@@ -1185,7 +1135,6 @@ setMethod("md5",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases mean mean,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1211,7 +1160,6 @@ setMethod("mean",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases min min,Column-method
-#' @export
 #' @note min since 1.5.0
 setMethod("min",
           signature(x = "Column"),
@@ -1225,7 +1173,6 @@ setMethod("min",
 #'
 #' @rdname column_datetime_functions
 #' @aliases minute minute,Column-method
-#' @export
 #' @note minute since 1.5.0
 setMethod("minute",
           signature(x = "Column"),
@@ -1248,7 +1195,6 @@ setMethod("minute",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases monotonically_increasing_id monotonically_increasing_id,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{head(select(df, monotonically_increasing_id()))}
@@ -1264,7 +1210,6 @@ setMethod("monotonically_increasing_id",
 #'
 #' @rdname column_datetime_functions
 #' @aliases month month,Column-method
-#' @export
 #' @note month since 1.5.0
 setMethod("month",
           signature(x = "Column"),
@@ -1278,7 +1223,6 @@ setMethod("month",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases negate negate,Column-method
-#' @export
 #' @note negate since 1.5.0
 setMethod("negate",
           signature(x = "Column"),
@@ -1292,7 +1236,6 @@ setMethod("negate",
 #'
 #' @rdname column_datetime_functions
 #' @aliases quarter quarter,Column-method
-#' @export
 #' @note quarter since 1.5.0
 setMethod("quarter",
           signature(x = "Column"),
@@ -1306,7 +1249,6 @@ setMethod("quarter",
 #'
 #' @rdname column_string_functions
 #' @aliases reverse reverse,Column-method
-#' @export
 #' @note reverse since 1.5.0
 setMethod("reverse",
           signature(x = "Column"),
@@ -1321,7 +1263,6 @@ setMethod("reverse",
 #'
 #' @rdname column_math_functions
 #' @aliases rint rint,Column-method
-#' @export
 #' @note rint since 1.5.0
 setMethod("rint",
           signature(x = "Column"),
@@ -1336,7 +1277,6 @@ setMethod("rint",
 #'
 #' @rdname column_math_functions
 #' @aliases round round,Column-method
-#' @export
 #' @note round since 1.5.0
 setMethod("round",
           signature(x = "Column"),
@@ -1356,7 +1296,6 @@ setMethod("round",
 #'        to the left of the decimal point when \code{scale} < 0.
 #' @rdname column_math_functions
 #' @aliases bround bround,Column-method
-#' @export
 #' @note bround since 2.0.0
 setMethod("bround",
           signature(x = "Column"),
@@ -1371,7 +1310,6 @@ setMethod("bround",
 #'
 #' @rdname column_string_functions
 #' @aliases rtrim rtrim,Column,missing-method
-#' @export
 #' @note rtrim since 1.5.0
 setMethod("rtrim",
           signature(x = "Column", trimString = "missing"),
@@ -1382,7 +1320,6 @@ setMethod("rtrim",
 
 #' @rdname column_string_functions
 #' @aliases rtrim,Column,character-method
-#' @export
 #' @note rtrim(Column, character) since 2.3.0
 setMethod("rtrim",
           signature(x = "Column", trimString = "character"),
@@ -1396,7 +1333,6 @@ setMethod("rtrim",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases sd sd,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1414,7 +1350,6 @@ setMethod("sd",
 #'
 #' @rdname column_datetime_functions
 #' @aliases second second,Column-method
-#' @export
 #' @note second since 1.5.0
 setMethod("second",
           signature(x = "Column"),
@@ -1429,7 +1364,6 @@ setMethod("second",
 #'
 #' @rdname column_misc_functions
 #' @aliases sha1 sha1,Column-method
-#' @export
 #' @note sha1 since 1.5.0
 setMethod("sha1",
           signature(x = "Column"),
@@ -1443,7 +1377,6 @@ setMethod("sha1",
 #'
 #' @rdname column_math_functions
 #' @aliases signum signum,Column-method
-#' @export
 #' @note signum since 1.5.0
 setMethod("signum",
           signature(x = "Column"),
@@ -1457,7 +1390,6 @@ setMethod("signum",
 #'
 #' @rdname column_math_functions
 #' @aliases sign sign,Column-method
-#' @export
 #' @note sign since 1.5.0
 setMethod("sign", signature(x = "Column"),
           function(x) {
@@ -1470,7 +1402,6 @@ setMethod("sign", signature(x = "Column"),
 #'
 #' @rdname column_math_functions
 #' @aliases sin sin,Column-method
-#' @export
 #' @note sin since 1.5.0
 setMethod("sin",
           signature(x = "Column"),
@@ -1485,7 +1416,6 @@ setMethod("sin",
 #'
 #' @rdname column_math_functions
 #' @aliases sinh sinh,Column-method
-#' @export
 #' @note sinh since 1.5.0
 setMethod("sinh",
           signature(x = "Column"),
@@ -1499,7 +1429,6 @@ setMethod("sinh",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases skewness skewness,Column-method
-#' @export
 #' @note skewness since 1.6.0
 setMethod("skewness",
           signature(x = "Column"),
@@ -1513,7 +1442,6 @@ setMethod("skewness",
 #'
 #' @rdname column_string_functions
 #' @aliases soundex soundex,Column-method
-#' @export
 #' @note soundex since 1.5.0
 setMethod("soundex",
           signature(x = "Column"),
@@ -1530,7 +1458,6 @@ setMethod("soundex",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases spark_partition_id spark_partition_id,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{head(select(df, spark_partition_id()))}
@@ -1560,7 +1487,6 @@ setMethod("stddev",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases stddev_pop stddev_pop,Column-method
-#' @export
 #' @note stddev_pop since 1.6.0
 setMethod("stddev_pop",
           signature(x = "Column"),
@@ -1574,7 +1500,6 @@ setMethod("stddev_pop",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases stddev_samp stddev_samp,Column-method
-#' @export
 #' @note stddev_samp since 1.6.0
 setMethod("stddev_samp",
           signature(x = "Column"),
@@ -1588,7 +1513,6 @@ setMethod("stddev_samp",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases struct struct,characterOrColumn-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1614,7 +1538,6 @@ setMethod("struct",
 #'
 #' @rdname column_math_functions
 #' @aliases sqrt sqrt,Column-method
-#' @export
 #' @note sqrt since 1.5.0
 setMethod("sqrt",
           signature(x = "Column"),
@@ -1628,7 +1551,6 @@ setMethod("sqrt",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases sum sum,Column-method
-#' @export
 #' @note sum since 1.5.0
 setMethod("sum",
           signature(x = "Column"),
@@ -1642,7 +1564,6 @@ setMethod("sum",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases sumDistinct sumDistinct,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1663,7 +1584,6 @@ setMethod("sumDistinct",
 #'
 #' @rdname column_math_functions
 #' @aliases tan tan,Column-method
-#' @export
 #' @note tan since 1.5.0
 setMethod("tan",
           signature(x = "Column"),
@@ -1678,7 +1598,6 @@ setMethod("tan",
 #'
 #' @rdname column_math_functions
 #' @aliases tanh tanh,Column-method
-#' @export
 #' @note tanh since 1.5.0
 setMethod("tanh",
           signature(x = "Column"),
@@ -1693,7 +1612,6 @@ setMethod("tanh",
 #'
 #' @rdname column_math_functions
 #' @aliases toDegrees toDegrees,Column-method
-#' @export
 #' @note toDegrees since 1.4.0
 setMethod("toDegrees",
           signature(x = "Column"),
@@ -1708,7 +1626,6 @@ setMethod("toDegrees",
 #'
 #' @rdname column_math_functions
 #' @aliases toRadians toRadians,Column-method
-#' @export
 #' @note toRadians since 1.4.0
 setMethod("toRadians",
           signature(x = "Column"),
@@ -1728,7 +1645,6 @@ setMethod("toRadians",
 #'
 #' @rdname column_datetime_functions
 #' @aliases to_date to_date,Column,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1749,7 +1665,6 @@ setMethod("to_date",
 
 #' @rdname column_datetime_functions
 #' @aliases to_date,Column,character-method
-#' @export
 #' @note to_date(Column, character) since 2.2.0
 setMethod("to_date",
           signature(x = "Column", format = "character"),
@@ -1765,7 +1680,6 @@ setMethod("to_date",
 #'
 #' @rdname column_collection_functions
 #' @aliases to_json to_json,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -1803,7 +1717,6 @@ setMethod("to_json", signature(x = "Column"),
 #'
 #' @rdname column_datetime_functions
 #' @aliases to_timestamp to_timestamp,Column,missing-method
-#' @export
 #' @note to_timestamp(Column) since 2.2.0
 setMethod("to_timestamp",
           signature(x = "Column", format = "missing"),
@@ -1814,7 +1727,6 @@ setMethod("to_timestamp",
 
 #' @rdname column_datetime_functions
 #' @aliases to_timestamp,Column,character-method
-#' @export
 #' @note to_timestamp(Column, character) since 2.2.0
 setMethod("to_timestamp",
           signature(x = "Column", format = "character"),
@@ -1829,7 +1741,6 @@ setMethod("to_timestamp",
 #'
 #' @rdname column_string_functions
 #' @aliases trim trim,Column,missing-method
-#' @export
 #' @note trim since 1.5.0
 setMethod("trim",
           signature(x = "Column", trimString = "missing"),
@@ -1840,7 +1751,6 @@ setMethod("trim",
 
 #' @rdname column_string_functions
 #' @aliases trim,Column,character-method
-#' @export
 #' @note trim(Column, character) since 2.3.0
 setMethod("trim",
           signature(x = "Column", trimString = "character"),
@@ -1855,7 +1765,6 @@ setMethod("trim",
 #'
 #' @rdname column_string_functions
 #' @aliases unbase64 unbase64,Column-method
-#' @export
 #' @note unbase64 since 1.5.0
 setMethod("unbase64",
           signature(x = "Column"),
@@ -1870,7 +1779,6 @@ setMethod("unbase64",
 #'
 #' @rdname column_math_functions
 #' @aliases unhex unhex,Column-method
-#' @export
 #' @note unhex since 1.5.0
 setMethod("unhex",
           signature(x = "Column"),
@@ -1884,7 +1792,6 @@ setMethod("unhex",
 #'
 #' @rdname column_string_functions
 #' @aliases upper upper,Column-method
-#' @export
 #' @note upper since 1.4.0
 setMethod("upper",
           signature(x = "Column"),
@@ -1898,7 +1805,6 @@ setMethod("upper",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases var var,Column-method
-#' @export
 #' @examples
 #'
 #'\dontrun{
@@ -1913,7 +1819,6 @@ setMethod("var",
 
 #' @rdname column_aggregate_functions
 #' @aliases variance variance,Column-method
-#' @export
 #' @note variance since 1.6.0
 setMethod("variance",
           signature(x = "Column"),
@@ -1927,7 +1832,6 @@ setMethod("variance",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases var_pop var_pop,Column-method
-#' @export
 #' @note var_pop since 1.5.0
 setMethod("var_pop",
           signature(x = "Column"),
@@ -1941,7 +1845,6 @@ setMethod("var_pop",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases var_samp var_samp,Column-method
-#' @export
 #' @note var_samp since 1.6.0
 setMethod("var_samp",
           signature(x = "Column"),
@@ -1955,7 +1858,6 @@ setMethod("var_samp",
 #'
 #' @rdname column_datetime_functions
 #' @aliases weekofyear weekofyear,Column-method
-#' @export
 #' @note weekofyear since 1.5.0
 setMethod("weekofyear",
           signature(x = "Column"),
@@ -1969,7 +1871,6 @@ setMethod("weekofyear",
 #'
 #' @rdname column_datetime_functions
 #' @aliases year year,Column-method
-#' @export
 #' @note year since 1.5.0
 setMethod("year",
           signature(x = "Column"),
@@ -1985,7 +1886,6 @@ setMethod("year",
 #'
 #' @rdname column_math_functions
 #' @aliases atan2 atan2,Column-method
-#' @export
 #' @note atan2 since 1.5.0
 setMethod("atan2", signature(y = "Column"),
           function(y, x) {
@@ -2001,7 +1901,6 @@ setMethod("atan2", signature(y = "Column"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases datediff datediff,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2025,7 +1924,6 @@ setMethod("datediff", signature(y = "Column"),
 #'
 #' @rdname column_math_functions
 #' @aliases hypot hypot,Column-method
-#' @export
 #' @note hypot since 1.4.0
 setMethod("hypot", signature(y = "Column"),
           function(y, x) {
@@ -2041,7 +1939,6 @@ setMethod("hypot", signature(y = "Column"),
 #'
 #' @rdname column_string_functions
 #' @aliases levenshtein levenshtein,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2064,7 +1961,6 @@ setMethod("levenshtein", signature(y = "Column"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases months_between months_between,Column-method
-#' @export
 #' @note months_between since 1.5.0
 setMethod("months_between", signature(y = "Column"),
           function(y, x) {
@@ -2082,7 +1978,6 @@ setMethod("months_between", signature(y = "Column"),
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases nanvl nanvl,Column-method
-#' @export
 #' @note nanvl since 1.5.0
 setMethod("nanvl", signature(y = "Column"),
           function(y, x) {
@@ -2099,7 +1994,6 @@ setMethod("nanvl", signature(y = "Column"),
 #'
 #' @rdname column_math_functions
 #' @aliases pmod pmod,Column-method
-#' @export
 #' @note pmod since 1.5.0
 setMethod("pmod", signature(y = "Column"),
           function(y, x) {
@@ -2114,7 +2008,6 @@ setMethod("pmod", signature(y = "Column"),
 #'
 #' @rdname column_aggregate_functions
 #' @aliases approxCountDistinct,Column-method
-#' @export
 #' @note approxCountDistinct(Column, numeric) since 1.4.0
 setMethod("approxCountDistinct",
           signature(x = "Column"),
@@ -2128,7 +2021,6 @@ setMethod("approxCountDistinct",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases countDistinct countDistinct,Column-method
-#' @export
 #' @note countDistinct since 1.4.0
 setMethod("countDistinct",
           signature(x = "Column"),
@@ -2148,7 +2040,6 @@ setMethod("countDistinct",
 #'
 #' @rdname column_string_functions
 #' @aliases concat concat,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2177,7 +2068,6 @@ setMethod("concat",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases greatest greatest,Column-method
-#' @export
 #' @note greatest since 1.5.0
 setMethod("greatest",
           signature(x = "Column"),
@@ -2197,7 +2087,6 @@ setMethod("greatest",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases least least,Column-method
-#' @export
 #' @note least since 1.5.0
 setMethod("least",
           signature(x = "Column"),
@@ -2216,7 +2105,6 @@ setMethod("least",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases n_distinct n_distinct,Column-method
-#' @export
 #' @note n_distinct since 1.4.0
 setMethod("n_distinct", signature(x = "Column"),
           function(x, ...) {
@@ -2226,7 +2114,6 @@ setMethod("n_distinct", signature(x = "Column"),
 #' @rdname count
 #' @name n
 #' @aliases n,Column-method
-#' @export
 #' @examples \dontrun{n(df$c)}
 #' @note n since 1.4.0
 setMethod("n", signature(x = "Column"),
@@ -2245,7 +2132,6 @@ setMethod("n", signature(x = "Column"),
 #' @rdname column_datetime_diff_functions
 #'
 #' @aliases date_format date_format,Column,character-method
-#' @export
 #' @note date_format since 1.5.0
 setMethod("date_format", signature(y = "Column", x = "character"),
           function(y, x) {
@@ -2263,7 +2149,6 @@ setMethod("date_format", signature(y = "Column", x = "character"),
 #'               Since Spark 2.3, the DDL-formatted string is also supported for the schema.
 #' @param as.json.array indicating if input string is JSON array of objects or a single object.
 #' @aliases from_json from_json,Column,characterOrstructType-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2306,7 +2191,6 @@ setMethod("from_json", signature(x = "Column", schema = "characterOrstructType")
 #' @rdname column_datetime_diff_functions
 #'
 #' @aliases from_utc_timestamp from_utc_timestamp,Column,character-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2328,7 +2212,6 @@ setMethod("from_utc_timestamp", signature(y = "Column", x = "character"),
 #'
 #' @rdname column_string_functions
 #' @aliases instr instr,Column,character-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2351,7 +2234,6 @@ setMethod("instr", signature(y = "Column", x = "character"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases next_day next_day,Column,character-method
-#' @export
 #' @note next_day since 1.5.0
 setMethod("next_day", signature(y = "Column", x = "character"),
           function(y, x) {
@@ -2366,7 +2248,6 @@ setMethod("next_day", signature(y = "Column", x = "character"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases to_utc_timestamp to_utc_timestamp,Column,character-method
-#' @export
 #' @note to_utc_timestamp since 1.5.0
 setMethod("to_utc_timestamp", signature(y = "Column", x = "character"),
           function(y, x) {
@@ -2379,7 +2260,6 @@ setMethod("to_utc_timestamp", signature(y = "Column", x = "character"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases add_months add_months,Column,numeric-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2400,7 +2280,6 @@ setMethod("add_months", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_datetime_diff_functions
 #' @aliases date_add date_add,Column,numeric-method
-#' @export
 #' @note date_add since 1.5.0
 setMethod("date_add", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2414,7 +2293,6 @@ setMethod("date_add", signature(y = "Column", x = "numeric"),
 #' @rdname column_datetime_diff_functions
 #'
 #' @aliases date_sub date_sub,Column,numeric-method
-#' @export
 #' @note date_sub since 1.5.0
 setMethod("date_sub", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2431,7 +2309,6 @@ setMethod("date_sub", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_string_functions
 #' @aliases format_number format_number,Column,numeric-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2454,7 +2331,6 @@ setMethod("format_number", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_misc_functions
 #' @aliases sha2 sha2,Column,numeric-method
-#' @export
 #' @note sha2 since 1.5.0
 setMethod("sha2", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2468,7 +2344,6 @@ setMethod("sha2", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_math_functions
 #' @aliases shiftLeft shiftLeft,Column,numeric-method
-#' @export
 #' @note shiftLeft since 1.5.0
 setMethod("shiftLeft", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2484,7 +2359,6 @@ setMethod("shiftLeft", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_math_functions
 #' @aliases shiftRight shiftRight,Column,numeric-method
-#' @export
 #' @note shiftRight since 1.5.0
 setMethod("shiftRight", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2500,7 +2374,6 @@ setMethod("shiftRight", signature(y = "Column", x = "numeric"),
 #'
 #' @rdname column_math_functions
 #' @aliases shiftRightUnsigned shiftRightUnsigned,Column,numeric-method
-#' @export
 #' @note shiftRightUnsigned since 1.5.0
 setMethod("shiftRightUnsigned", signature(y = "Column", x = "numeric"),
           function(y, x) {
@@ -2517,7 +2390,6 @@ setMethod("shiftRightUnsigned", signature(y = "Column", x = "numeric"),
 #' @param sep separator to use.
 #' @rdname column_string_functions
 #' @aliases concat_ws concat_ws,character,Column-method
-#' @export
 #' @note concat_ws since 1.5.0
 setMethod("concat_ws", signature(sep = "character", x = "Column"),
           function(sep, x, ...) {
@@ -2533,7 +2405,6 @@ setMethod("concat_ws", signature(sep = "character", x = "Column"),
 #' @param toBase base to convert to.
 #' @rdname column_math_functions
 #' @aliases conv conv,Column,numeric,numeric-method
-#' @export
 #' @note conv since 1.5.0
 setMethod("conv", signature(x = "Column", fromBase = "numeric", toBase = "numeric"),
           function(x, fromBase, toBase) {
@@ -2551,7 +2422,6 @@ setMethod("conv", signature(x = "Column", fromBase = "numeric", toBase = "numeri
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases expr expr,character-method
-#' @export
 #' @note expr since 1.5.0
 setMethod("expr", signature(x = "character"),
           function(x) {
@@ -2566,7 +2436,6 @@ setMethod("expr", signature(x = "character"),
 #' @param format a character object of format strings.
 #' @rdname column_string_functions
 #' @aliases format_string format_string,character,Column-method
-#' @export
 #' @note format_string since 1.5.0
 setMethod("format_string", signature(format = "character", x = "Column"),
           function(format, x, ...) {
@@ -2587,7 +2456,6 @@ setMethod("format_string", signature(format = "character", x = "Column"),
 #' @rdname column_datetime_functions
 #'
 #' @aliases from_unixtime from_unixtime,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2629,7 +2497,6 @@ setMethod("from_unixtime", signature(x = "Column"),
 #'                  \code{startTime} as \code{"15 minutes"}.
 #' @rdname column_datetime_functions
 #' @aliases window window,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2680,7 +2547,6 @@ setMethod("window", signature(x = "Column"),
 #' @param pos start position of search.
 #' @rdname column_string_functions
 #' @aliases locate locate,character,Column-method
-#' @export
 #' @note locate since 1.5.0
 setMethod("locate", signature(substr = "character", str = "Column"),
           function(substr, str, pos = 1) {
@@ -2697,7 +2563,6 @@ setMethod("locate", signature(substr = "character", str = "Column"),
 #' @param pad a character string to be padded with.
 #' @rdname column_string_functions
 #' @aliases lpad lpad,Column,numeric,character-method
-#' @export
 #' @note lpad since 1.5.0
 setMethod("lpad", signature(x = "Column", len = "numeric", pad = "character"),
           function(x, len, pad) {
@@ -2714,7 +2579,6 @@ setMethod("lpad", signature(x = "Column", len = "numeric", pad = "character"),
 #' @rdname column_nonaggregate_functions
 #' @param seed a random seed. Can be missing.
 #' @aliases rand rand,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2729,7 +2593,6 @@ setMethod("rand", signature(seed = "missing"),
 
 #' @rdname column_nonaggregate_functions
 #' @aliases rand,numeric-method
-#' @export
 #' @note rand(numeric) since 1.5.0
 setMethod("rand", signature(seed = "numeric"),
           function(seed) {
@@ -2743,7 +2606,6 @@ setMethod("rand", signature(seed = "numeric"),
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases randn randn,missing-method
-#' @export
 #' @note randn since 1.5.0
 setMethod("randn", signature(seed = "missing"),
           function(seed) {
@@ -2753,7 +2615,6 @@ setMethod("randn", signature(seed = "missing"),
 
 #' @rdname column_nonaggregate_functions
 #' @aliases randn,numeric-method
-#' @export
 #' @note randn(numeric) since 1.5.0
 setMethod("randn", signature(seed = "numeric"),
           function(seed) {
@@ -2770,7 +2631,6 @@ setMethod("randn", signature(seed = "numeric"),
 #' @param idx a group index.
 #' @rdname column_string_functions
 #' @aliases regexp_extract regexp_extract,Column,character,numeric-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2799,7 +2659,6 @@ setMethod("regexp_extract",
 #' @param replacement a character string that a matched \code{pattern} is replaced with.
 #' @rdname column_string_functions
 #' @aliases regexp_replace regexp_replace,Column,character,character-method
-#' @export
 #' @note regexp_replace since 1.5.0
 setMethod("regexp_replace",
           signature(x = "Column", pattern = "character", replacement = "character"),
@@ -2815,7 +2674,6 @@ setMethod("regexp_replace",
 #'
 #' @rdname column_string_functions
 #' @aliases rpad rpad,Column,numeric,character-method
-#' @export
 #' @note rpad since 1.5.0
 setMethod("rpad", signature(x = "Column", len = "numeric", pad = "character"),
           function(x, len, pad) {
@@ -2838,7 +2696,6 @@ setMethod("rpad", signature(x = "Column", len = "numeric", pad = "character"),
 #'              counting from the right.
 #' @rdname column_string_functions
 #' @aliases substring_index substring_index,Column,character,numeric-method
-#' @export
 #' @note substring_index since 1.5.0
 setMethod("substring_index",
           signature(x = "Column", delim = "character", count = "numeric"),
@@ -2861,7 +2718,6 @@ setMethod("substring_index",
 #'                      at the same location, if any.
 #' @rdname column_string_functions
 #' @aliases translate translate,Column,character,character-method
-#' @export
 #' @note translate since 1.5.0
 setMethod("translate",
           signature(x = "Column", matchingString = "character", replaceString = "character"),
@@ -2876,7 +2732,6 @@ setMethod("translate",
 #'
 #' @rdname column_datetime_functions
 #' @aliases unix_timestamp unix_timestamp,missing,missing-method
-#' @export
 #' @note unix_timestamp since 1.5.0
 setMethod("unix_timestamp", signature(x = "missing", format = "missing"),
           function(x, format) {
@@ -2886,7 +2741,6 @@ setMethod("unix_timestamp", signature(x = "missing", format = "missing"),
 
 #' @rdname column_datetime_functions
 #' @aliases unix_timestamp,Column,missing-method
-#' @export
 #' @note unix_timestamp(Column) since 1.5.0
 setMethod("unix_timestamp", signature(x = "Column", format = "missing"),
           function(x, format) {
@@ -2896,7 +2750,6 @@ setMethod("unix_timestamp", signature(x = "Column", format = "missing"),
 
 #' @rdname column_datetime_functions
 #' @aliases unix_timestamp,Column,character-method
-#' @export
 #' @note unix_timestamp(Column, character) since 1.5.0
 setMethod("unix_timestamp", signature(x = "Column", format = "character"),
           function(x, format = "yyyy-MM-dd HH:mm:ss") {
@@ -2912,7 +2765,6 @@ setMethod("unix_timestamp", signature(x = "Column", format = "character"),
 #' @param condition the condition to test on. Must be a Column expression.
 #' @param value result expression.
 #' @aliases when when,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -2941,7 +2793,6 @@ setMethod("when", signature(condition = "Column", value = "ANY"),
 #' @param yes return values for \code{TRUE} elements of test.
 #' @param no return values for \code{FALSE} elements of test.
 #' @aliases ifelse ifelse,Column-method
-#' @export
 #' @note ifelse since 1.5.0
 setMethod("ifelse",
           signature(test = "Column", yes = "ANY", no = "ANY"),
@@ -2967,7 +2818,6 @@ setMethod("ifelse",
 #'
 #' @rdname column_window_functions
 #' @aliases cume_dist cume_dist,missing-method
-#' @export
 #' @note cume_dist since 1.6.0
 setMethod("cume_dist",
           signature("missing"),
@@ -2988,7 +2838,6 @@ setMethod("cume_dist",
 #'
 #' @rdname column_window_functions
 #' @aliases dense_rank dense_rank,missing-method
-#' @export
 #' @note dense_rank since 1.6.0
 setMethod("dense_rank",
           signature("missing"),
@@ -3005,7 +2854,6 @@ setMethod("dense_rank",
 #'
 #' @rdname column_window_functions
 #' @aliases lag lag,characterOrColumn-method
-#' @export
 #' @note lag since 1.6.0
 setMethod("lag",
           signature(x = "characterOrColumn"),
@@ -3030,7 +2878,6 @@ setMethod("lag",
 #'
 #' @rdname column_window_functions
 #' @aliases lead lead,characterOrColumn,numeric-method
-#' @export
 #' @note lead since 1.6.0
 setMethod("lead",
           signature(x = "characterOrColumn", offset = "numeric", defaultValue = "ANY"),
@@ -3054,7 +2901,6 @@ setMethod("lead",
 #'
 #' @rdname column_window_functions
 #' @aliases ntile ntile,numeric-method
-#' @export
 #' @note ntile since 1.6.0
 setMethod("ntile",
           signature(x = "numeric"),
@@ -3072,7 +2918,6 @@ setMethod("ntile",
 #'
 #' @rdname column_window_functions
 #' @aliases percent_rank percent_rank,missing-method
-#' @export
 #' @note percent_rank since 1.6.0
 setMethod("percent_rank",
           signature("missing"),
@@ -3093,7 +2938,6 @@ setMethod("percent_rank",
 #'
 #' @rdname column_window_functions
 #' @aliases rank rank,missing-method
-#' @export
 #' @note rank since 1.6.0
 setMethod("rank",
           signature(x = "missing"),
@@ -3104,7 +2948,6 @@ setMethod("rank",
 
 #' @rdname column_window_functions
 #' @aliases rank,ANY-method
-#' @export
 setMethod("rank",
           signature(x = "ANY"),
           function(x, ...) {
@@ -3118,7 +2961,6 @@ setMethod("rank",
 #'
 #' @rdname column_window_functions
 #' @aliases row_number row_number,missing-method
-#' @export
 #' @note row_number since 1.6.0
 setMethod("row_number",
           signature("missing"),
@@ -3136,7 +2978,6 @@ setMethod("row_number",
 #' @param value a value to be checked if contained in the column
 #' @rdname column_collection_functions
 #' @aliases array_contains array_contains,Column-method
-#' @export
 #' @note array_contains since 1.6.0
 setMethod("array_contains",
           signature(x = "Column", value = "ANY"),
@@ -3150,7 +2991,6 @@ setMethod("array_contains",
 #'
 #' @rdname column_collection_functions
 #' @aliases map_keys map_keys,Column-method
-#' @export
 #' @note map_keys since 2.3.0
 setMethod("map_keys",
           signature(x = "Column"),
@@ -3164,7 +3004,6 @@ setMethod("map_keys",
 #'
 #' @rdname column_collection_functions
 #' @aliases map_values map_values,Column-method
-#' @export
 #' @note map_values since 2.3.0
 setMethod("map_values",
           signature(x = "Column"),
@@ -3178,7 +3017,6 @@ setMethod("map_values",
 #'
 #' @rdname column_collection_functions
 #' @aliases explode explode,Column-method
-#' @export
 #' @note explode since 1.5.0
 setMethod("explode",
           signature(x = "Column"),
@@ -3192,7 +3030,6 @@ setMethod("explode",
 #'
 #' @rdname column_collection_functions
 #' @aliases size size,Column-method
-#' @export
 #' @note size since 1.5.0
 setMethod("size",
           signature(x = "Column"),
@@ -3210,7 +3047,6 @@ setMethod("size",
 #'            TRUE, sorting is in ascending order.
 #'            FALSE, sorting is in descending order.
 #' @aliases sort_array sort_array,Column-method
-#' @export
 #' @note sort_array since 1.6.0
 setMethod("sort_array",
           signature(x = "Column"),
@@ -3225,7 +3061,6 @@ setMethod("sort_array",
 #'
 #' @rdname column_collection_functions
 #' @aliases posexplode posexplode,Column-method
-#' @export
 #' @note posexplode since 2.1.0
 setMethod("posexplode",
           signature(x = "Column"),
@@ -3240,7 +3075,6 @@ setMethod("posexplode",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases create_array create_array,Column-method
-#' @export
 #' @note create_array since 2.3.0
 setMethod("create_array",
           signature(x = "Column"),
@@ -3261,7 +3095,6 @@ setMethod("create_array",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases create_map create_map,Column-method
-#' @export
 #' @note create_map since 2.3.0
 setMethod("create_map",
           signature(x = "Column"),
@@ -3279,7 +3112,6 @@ setMethod("create_map",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases collect_list collect_list,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3299,7 +3131,6 @@ setMethod("collect_list",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases collect_set collect_set,Column-method
-#' @export
 #' @note collect_set since 2.3.0
 setMethod("collect_set",
           signature(x = "Column"),
@@ -3314,7 +3145,6 @@ setMethod("collect_set",
 #'
 #' @rdname column_string_functions
 #' @aliases split_string split_string,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3337,7 +3167,6 @@ setMethod("split_string",
 #' @param n number of repetitions.
 #' @rdname column_string_functions
 #' @aliases repeat_string repeat_string,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3360,7 +3189,6 @@ setMethod("repeat_string",
 #'
 #' @rdname column_collection_functions
 #' @aliases explode_outer explode_outer,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3385,7 +3213,6 @@ setMethod("explode_outer",
 #'
 #' @rdname column_collection_functions
 #' @aliases posexplode_outer posexplode_outer,Column-method
-#' @export
 #' @note posexplode_outer since 2.3.0
 setMethod("posexplode_outer",
           signature(x = "Column"),
@@ -3406,7 +3233,6 @@ setMethod("posexplode_outer",
 #' @name not
 #' @aliases not,Column-method
 #' @family non-aggregate functions
-#' @export
 #' @examples
 #' \dontrun{
 #' df <- createDataFrame(data.frame(
@@ -3434,7 +3260,6 @@ setMethod("not",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases grouping_bit grouping_bit,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3467,7 +3292,6 @@ setMethod("grouping_bit",
 #'
 #' @rdname column_aggregate_functions
 #' @aliases grouping_id grouping_id,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3502,7 +3326,6 @@ setMethod("grouping_id",
 #'
 #' @rdname column_nonaggregate_functions
 #' @aliases input_file_name input_file_name,missing-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3520,7 +3343,6 @@ setMethod("input_file_name", signature("missing"),
 #'
 #' @rdname column_datetime_functions
 #' @aliases trunc trunc,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3540,7 +3362,6 @@ setMethod("trunc",
 #'
 #' @rdname column_datetime_functions
 #' @aliases date_trunc date_trunc,character,Column-method
-#' @export
 #' @examples
 #'
 #' \dontrun{
@@ -3559,7 +3380,6 @@ setMethod("date_trunc",
 #'
 #' @rdname column_datetime_functions
 #' @aliases current_date current_date,missing-method
-#' @export
 #' @examples
 #' \dontrun{
 #' head(select(df, current_date(), current_timestamp()))}
@@ -3576,7 +3396,6 @@ setMethod("current_date",
 #'
 #' @rdname column_datetime_functions
 #' @aliases current_timestamp current_timestamp,missing-method
-#' @export
 #' @note current_timestamp since 2.3.0
 setMethod("current_timestamp",
           signature("missing"),


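For reference, a minimal, self-contained sketch of the roxygen shape these hunks leave behind: the @rdname, @aliases, and @note tags stay attached to each S4 method, and only the @export line is gone. The generic exampleFunc, the stand-in Column class, and the version number below are hypothetical placeholders for illustration; they are not SparkR code and do not appear in this commit.

library(methods)

# Stand-in class and generic so the sketch runs on its own
# (hypothetical; not SparkR's real Column class or generics).
setClass("Column", representation(jc = "character"))
setGeneric("exampleFunc", function(y, x) standardGeneric("exampleFunc"))

#' Example of a documented method after this change
#'
#' @rdname column_math_functions
#' @aliases exampleFunc exampleFunc,Column-method
#' @note exampleFunc since 9.9.9
setMethod("exampleFunc", signature(y = "Column"),
          function(y, x) {
            # The roxygen block above keeps its documentation tags but no
            # longer carries @export; the method definition is unchanged.
            y
          })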