Posted to commits@spark.apache.org by gu...@apache.org on 2018/07/04 04:41:48 UTC

[1/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Repository: spark-website
Updated Branches:
  refs/heads/asf-site 26b527127 -> 5660fb9a4


http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/searchindex.js
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/searchindex.js b/site/docs/2.3.1/api/python/searchindex.js
index 0a5ec65..b5c8344 100644
--- a/site/docs/2.3.1/api/python/searchindex.js
+++ b/site/docs/2.3.1/api/python/searchindex.js
@@ -1 +1 @@
-Search.setIndex({docnames:["index","pyspark","pyspark.ml","pyspark.mllib","pyspark.sql","pyspark.streaming"],envversion:52,filenames:["index.rst","pyspark.rst","pyspark.ml.rst","pyspark.mllib.rst","pyspark.sql.rst","pyspark.streaming.rst"],objects:{"":{pyspark:[1,0,0,"-"]},"pyspark.Accumulator":{add:[1,2,1,""],value:[1,3,1,""]},"pyspark.AccumulatorParam":{addInPlace:[1,2,1,""],zero:[1,2,1,""]},"pyspark.BasicProfiler":{profile:[1,2,1,""],stats:[1,2,1,""]},"pyspark.Broadcast":{destroy:[1,2,1,""],dump:[1,2,1,""],load:[1,2,1,""],unpersist:[1,2,1,""],value:[1,3,1,""]},"pyspark.MarshalSerializer":{dumps:[1,2,1,""],loads:[1,2,1,""]},"pyspark.PickleSerializer":{dumps:[1,2,1,""],loads:[1,2,1,""]},"pyspark.Profiler":{dump:[1,2,1,""],profile:[1,2,1,""],show:[1,2,1,""],stats:[1,2,1,""]},"pyspark.RDD":{aggregate:[1,2,1,""],aggregateByKey:[1,2,1,""],cache:[1,2,1,""],cartesian:[1,2,1,""],checkpoint:[1,2,1,""],coalesce:[1,2,1,""],cogroup:[1,2,1,""],collect:[1,2,1,""],collectAsMap:[1,2,1,""],combine
 ByKey:[1,2,1,""],context:[1,3,1,""],count:[1,2,1,""],countApprox:[1,2,1,""],countApproxDistinct:[1,2,1,""],countByKey:[1,2,1,""],countByValue:[1,2,1,""],distinct:[1,2,1,""],filter:[1,2,1,""],first:[1,2,1,""],flatMap:[1,2,1,""],flatMapValues:[1,2,1,""],fold:[1,2,1,""],foldByKey:[1,2,1,""],foreach:[1,2,1,""],foreachPartition:[1,2,1,""],fullOuterJoin:[1,2,1,""],getCheckpointFile:[1,2,1,""],getNumPartitions:[1,2,1,""],getStorageLevel:[1,2,1,""],glom:[1,2,1,""],groupBy:[1,2,1,""],groupByKey:[1,2,1,""],groupWith:[1,2,1,""],histogram:[1,2,1,""],id:[1,2,1,""],intersection:[1,2,1,""],isCheckpointed:[1,2,1,""],isEmpty:[1,2,1,""],isLocallyCheckpointed:[1,2,1,""],join:[1,2,1,""],keyBy:[1,2,1,""],keys:[1,2,1,""],leftOuterJoin:[1,2,1,""],localCheckpoint:[1,2,1,""],lookup:[1,2,1,""],map:[1,2,1,""],mapPartitions:[1,2,1,""],mapPartitionsWithIndex:[1,2,1,""],mapPartitionsWithSplit:[1,2,1,""],mapValues:[1,2,1,""],max:[1,2,1,""],mean:[1,2,1,""],meanApprox:[1,2,1,""],min:[1,2,1,""],name:[1,2,1,""],parti
 tionBy:[1,2,1,""],persist:[1,2,1,""],pipe:[1,2,1,""],randomSplit:[1,2,1,""],reduce:[1,2,1,""],reduceByKey:[1,2,1,""],reduceByKeyLocally:[1,2,1,""],repartition:[1,2,1,""],repartitionAndSortWithinPartitions:[1,2,1,""],rightOuterJoin:[1,2,1,""],sample:[1,2,1,""],sampleByKey:[1,2,1,""],sampleStdev:[1,2,1,""],sampleVariance:[1,2,1,""],saveAsHadoopDataset:[1,2,1,""],saveAsHadoopFile:[1,2,1,""],saveAsNewAPIHadoopDataset:[1,2,1,""],saveAsNewAPIHadoopFile:[1,2,1,""],saveAsPickleFile:[1,2,1,""],saveAsSequenceFile:[1,2,1,""],saveAsTextFile:[1,2,1,""],setName:[1,2,1,""],sortBy:[1,2,1,""],sortByKey:[1,2,1,""],stats:[1,2,1,""],stdev:[1,2,1,""],subtract:[1,2,1,""],subtractByKey:[1,2,1,""],sum:[1,2,1,""],sumApprox:[1,2,1,""],take:[1,2,1,""],takeOrdered:[1,2,1,""],takeSample:[1,2,1,""],toDebugString:[1,2,1,""],toLocalIterator:[1,2,1,""],top:[1,2,1,""],treeAggregate:[1,2,1,""],treeReduce:[1,2,1,""],union:[1,2,1,""],unpersist:[1,2,1,""],values:[1,2,1,""],variance:[1,2,1,""],zip:[1,2,1,""],zipWithIndex
 :[1,2,1,""],zipWithUniqueId:[1,2,1,""]},"pyspark.SparkConf":{contains:[1,2,1,""],get:[1,2,1,""],getAll:[1,2,1,""],set:[1,2,1,""],setAll:[1,2,1,""],setAppName:[1,2,1,""],setExecutorEnv:[1,2,1,""],setIfMissing:[1,2,1,""],setMaster:[1,2,1,""],setSparkHome:[1,2,1,""],toDebugString:[1,2,1,""]},"pyspark.SparkContext":{PACKAGE_EXTENSIONS:[1,3,1,""],accumulator:[1,2,1,""],addFile:[1,2,1,""],addPyFile:[1,2,1,""],applicationId:[1,3,1,""],binaryFiles:[1,2,1,""],binaryRecords:[1,2,1,""],broadcast:[1,2,1,""],cancelAllJobs:[1,2,1,""],cancelJobGroup:[1,2,1,""],defaultMinPartitions:[1,3,1,""],defaultParallelism:[1,3,1,""],dump_profiles:[1,2,1,""],emptyRDD:[1,2,1,""],getConf:[1,2,1,""],getLocalProperty:[1,2,1,""],getOrCreate:[1,4,1,""],hadoopFile:[1,2,1,""],hadoopRDD:[1,2,1,""],newAPIHadoopFile:[1,2,1,""],newAPIHadoopRDD:[1,2,1,""],parallelize:[1,2,1,""],pickleFile:[1,2,1,""],range:[1,2,1,""],runJob:[1,2,1,""],sequenceFile:[1,2,1,""],setCheckpointDir:[1,2,1,""],setJobDescription:[1,2,1,""],setJobGro
 up:[1,2,1,""],setLocalProperty:[1,2,1,""],setLogLevel:[1,2,1,""],setSystemProperty:[1,4,1,""],show_profiles:[1,2,1,""],sparkUser:[1,2,1,""],startTime:[1,3,1,""],statusTracker:[1,2,1,""],stop:[1,2,1,""],textFile:[1,2,1,""],uiWebUrl:[1,3,1,""],union:[1,2,1,""],version:[1,3,1,""],wholeTextFiles:[1,2,1,""]},"pyspark.SparkFiles":{get:[1,4,1,""],getRootDirectory:[1,4,1,""]},"pyspark.StatusTracker":{getActiveJobsIds:[1,2,1,""],getActiveStageIds:[1,2,1,""],getJobIdsForGroup:[1,2,1,""],getJobInfo:[1,2,1,""],getStageInfo:[1,2,1,""]},"pyspark.StorageLevel":{DISK_ONLY:[1,3,1,""],DISK_ONLY_2:[1,3,1,""],MEMORY_AND_DISK:[1,3,1,""],MEMORY_AND_DISK_2:[1,3,1,""],MEMORY_AND_DISK_SER:[1,3,1,""],MEMORY_AND_DISK_SER_2:[1,3,1,""],MEMORY_ONLY:[1,3,1,""],MEMORY_ONLY_2:[1,3,1,""],MEMORY_ONLY_SER:[1,3,1,""],MEMORY_ONLY_SER_2:[1,3,1,""],OFF_HEAP:[1,3,1,""]},"pyspark.TaskContext":{attemptNumber:[1,2,1,""],get:[1,4,1,""],partitionId:[1,2,1,""],stageId:[1,2,1,""],taskAttemptId:[1,2,1,""]},"pyspark.ml":{Estimator:
 [2,1,1,""],Model:[2,1,1,""],Pipeline:[2,1,1,""],PipelineModel:[2,1,1,""],Transformer:[2,1,1,""],UnaryTransformer:[2,1,1,""],classification:[2,0,0,"-"],clustering:[2,0,0,"-"],evaluation:[2,0,0,"-"],feature:[2,0,0,"-"],fpm:[2,0,0,"-"],image:[2,0,0,"-"],linalg:[2,0,0,"-"],param:[2,0,0,"-"],recommendation:[2,0,0,"-"],regression:[2,0,0,"-"],stat:[2,0,0,"-"],tuning:[2,0,0,"-"],util:[2,0,0,"-"]},"pyspark.ml.Estimator":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""]},"pyspark.ml.Model":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""],transform:[2,2,1,""]},"p
 yspark.ml.Pipeline":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getStages:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,4,1,""],save:[2,2,1,""],set:[2,2,1,""],setParams:[2,2,1,""],setStages:[2,2,1,""],stages:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.PipelineModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,4,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.Transformer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,
 1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""],transform:[2,2,1,""]},"pyspark.ml.UnaryTransformer":{copy:[2,2,1,""],createTransformFunc:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],outputCol:[2,3,1,""],outputDataType:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],transform:[2,2,1,""],transformSchema:[2,2,1,""],validateInputType:[2,2,1,""]},"pyspark.ml.classification":{BinaryLogisticRegressionSummary:[2,1,1,""],BinaryLogisticRegressionTrainingSummary:[2,1,1,""],DecisionTreeClassificationModel:[2,1,1,""],DecisionTreeClassifier:[2,1,1,""],GBTClassificationModel:[2,1,1,""],GBTClassifier:[2,1,1,""],LinearSVC:[2,1,1,""],LinearSVCModel:[2,1,1,""],LogisticRegression:[2,
 1,1,""],LogisticRegressionModel:[2,1,1,""],LogisticRegressionSummary:[2,1,1,""],LogisticRegressionTrainingSummary:[2,1,1,""],MultilayerPerceptronClassificationModel:[2,1,1,""],MultilayerPerceptronClassifier:[2,1,1,""],NaiveBayes:[2,1,1,""],NaiveBayesModel:[2,1,1,""],OneVsRest:[2,1,1,""],OneVsRestModel:[2,1,1,""],RandomForestClassificationModel:[2,1,1,""],RandomForestClassifier:[2,1,1,""]},"pyspark.ml.classification.BinaryLogisticRegressionSummary":{accuracy:[2,3,1,""],areaUnderROC:[2,3,1,""],fMeasureByLabel:[2,2,1,""],fMeasureByThreshold:[2,3,1,""],falsePositiveRateByLabel:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],labels:[2,3,1,""],pr:[2,3,1,""],precisionByLabel:[2,3,1,""],precisionByThreshold:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],probabilityCol:[2,3,1,""],recallByLabel:[2,3,1,""],recallByThreshold:[2,3,1,""],roc:[2,3,1,""],truePositiveRateByLabel:[2,3,1,""],weightedFMeasure:[2,2,1,""],weightedFalsePositiveRate:[2,3,1,""],weightedPrecision:[2,3,1,""]
 ,weightedRecall:[2,3,1,""],weightedTruePositiveRate:[2,3,1,""]},"pyspark.ml.classification.BinaryLogisticRegressionTrainingSummary":{accuracy:[2,3,1,""],areaUnderROC:[2,3,1,""],fMeasureByLabel:[2,2,1,""],fMeasureByThreshold:[2,3,1,""],falsePositiveRateByLabel:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],labels:[2,3,1,""],objectiveHistory:[2,3,1,""],pr:[2,3,1,""],precisionByLabel:[2,3,1,""],precisionByThreshold:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],probabilityCol:[2,3,1,""],recallByLabel:[2,3,1,""],recallByThreshold:[2,3,1,""],roc:[2,3,1,""],totalIterations:[2,3,1,""],truePositiveRateByLabel:[2,3,1,""],weightedFMeasure:[2,2,1,""],weightedFalsePositiveRate:[2,3,1,""],weightedPrecision:[2,3,1,""],weightedRecall:[2,3,1,""],weightedTruePositiveRate:[2,3,1,""]},"pyspark.ml.classification.DecisionTreeClassificationModel":{copy:[2,2,1,""],depth:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureImportances:[2,3,1,""],g
 etOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],numNodes:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toDebugString:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.classification.DecisionTreeClassifier":{cacheNodeIds:[2,3,1,""],checkpointInterval:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeaturesCol:[2,2,1,""],getImpurity:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getSeed:
 [2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],impurity:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeaturesCol:[2,2,1,""],setImpurity:[2,2,1,""],setLabelCol:[2,2,1,""],setMaxBins:[2,2,1,""],setMaxDepth:[2,2,1,""],setMaxMemoryInMB:[2,2,1,""],setMinInfoGain:[2,2,1,""],setMinInstancesPerNode:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],supportedImpurities:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.GBTClassificationModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamM
 ap:[2,2,1,""],featureImportances:[2,3,1,""],getNumTrees:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toDebugString:[2,3,1,""],totalNumNodes:[2,3,1,""],transform:[2,2,1,""],treeWeights:[2,3,1,""],trees:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.GBTClassifier":{cacheNodeIds:[2,3,1,""],checkpointInterval:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeaturesCol:[2,2,1,""],getLabelCol:[2,2,1,""],getLossType:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxIter:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,
 2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getStepSize:[2,2,1,""],getSubsamplingRate:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],lossType:[2,3,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxIter:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeaturesCol:[2,2,1,""],setLabelCol:[2,2,1,""],setLossType:[2,2,1,""],setMaxBins:[2,2,1,""],setMaxDepth:[2,2,1,""],setMaxIter:[2,2,1,""],setMaxMemoryInMB:[2,2,1,""],setMinInfoGain:[2,2,1,""],setMinInstancesPerNode:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setStepSize:[2,2,1,""],setSubsamplingRate:[2,2,1,""],stepSize:[2,3,1,""],subsamplingRate:[2,3,1,""],supportedLossTypes:[2,3,1,""],write:[2,2,1,""]},"pyspar
 k.ml.classification.LinearSVC":{aggregationDepth:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitIntercept:[2,3,1,""],fitMultiple:[2,2,1,""],getAggregationDepth:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFitIntercept:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getRegParam:[2,2,1,""],getStandardization:[2,2,1,""],getThreshold:[2,2,1,""],getTol:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],regParam:[2,3,1,""],save:[2,2,1,""],set:[2,2,1,""],setAggregationDepth:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFitIntercept:[2,2,1,""],setLabelCol:[2,2,1,""],setMaxIter:[2,2,1,""],setPara
 ms:[2,2,1,""],setPredictionCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setRegParam:[2,2,1,""],setStandardization:[2,2,1,""],setThreshold:[2,2,1,""],setTol:[2,2,1,""],setWeightCol:[2,2,1,""],standardization:[2,3,1,""],threshold:[2,3,1,""],tol:[2,3,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.LinearSVCModel":{coefficients:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],intercept:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.classification.LogisticRegression":{aggregationDepth:[2,3,1,""],copy:[2,2,1,""],elasticNetParam:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],family:[2,3,1,""],featuresCol:[2,3,1,"
 "],fit:[2,2,1,""],fitIntercept:[2,3,1,""],fitMultiple:[2,2,1,""],getAggregationDepth:[2,2,1,""],getElasticNetParam:[2,2,1,""],getFamily:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFitIntercept:[2,2,1,""],getLabelCol:[2,2,1,""],getLowerBoundsOnCoefficients:[2,2,1,""],getLowerBoundsOnIntercepts:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getRegParam:[2,2,1,""],getStandardization:[2,2,1,""],getThreshold:[2,2,1,""],getThresholds:[2,2,1,""],getTol:[2,2,1,""],getUpperBoundsOnCoefficients:[2,2,1,""],getUpperBoundsOnIntercepts:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],lowerBoundsOnCoefficients:[2,3,1,""],lowerBoundsOnIntercepts:[2,3,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],
 regParam:[2,3,1,""],save:[2,2,1,""],set:[2,2,1,""],setAggregationDepth:[2,2,1,""],setElasticNetParam:[2,2,1,""],setFamily:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFitIntercept:[2,2,1,""],setLabelCol:[2,2,1,""],setLowerBoundsOnCoefficients:[2,2,1,""],setLowerBoundsOnIntercepts:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setRegParam:[2,2,1,""],setStandardization:[2,2,1,""],setThreshold:[2,2,1,""],setThresholds:[2,2,1,""],setTol:[2,2,1,""],setUpperBoundsOnCoefficients:[2,2,1,""],setUpperBoundsOnIntercepts:[2,2,1,""],setWeightCol:[2,2,1,""],standardization:[2,3,1,""],threshold:[2,3,1,""],thresholds:[2,3,1,""],tol:[2,3,1,""],upperBoundsOnCoefficients:[2,3,1,""],upperBoundsOnIntercepts:[2,3,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.LogisticRegressionModel":{coefficientMatrix:[2,3,1,""],coefficients:[2,3,1,""],copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1
 ,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],intercept:[2,3,1,""],interceptVector:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.classification.LogisticRegressionSummary":{accuracy:[2,3,1,""],fMeasureByLabel:[2,2,1,""],falsePositiveRateByLabel:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],labels:[2,3,1,""],precisionByLabel:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],probabilityCol:[2,3,1,""],recallByLabel:[2,3,1,""],truePositiveRateByLabel:[2,3,1,""],weightedFMeasure:[2,2,1,""],weightedFalsePositiveRate:[2,3,1,""],weightedPrecision:[2,3,1,""],weightedRecall:[2,3,1,""],weightedTruePositiveRate:[2,3,1,""]},"pyspark.ml.classification.LogisticRegressio
 nTrainingSummary":{accuracy:[2,3,1,""],fMeasureByLabel:[2,2,1,""],falsePositiveRateByLabel:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],labels:[2,3,1,""],objectiveHistory:[2,3,1,""],precisionByLabel:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],probabilityCol:[2,3,1,""],recallByLabel:[2,3,1,""],totalIterations:[2,3,1,""],truePositiveRateByLabel:[2,3,1,""],weightedFMeasure:[2,2,1,""],weightedFalsePositiveRate:[2,3,1,""],weightedPrecision:[2,3,1,""],weightedRecall:[2,3,1,""],weightedTruePositiveRate:[2,3,1,""]},"pyspark.ml.classification.MultilayerPerceptronClassificationModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],layers:[2,3,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],weights:[2
 ,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.MultilayerPerceptronClassifier":{blockSize:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getBlockSize:[2,2,1,""],getFeaturesCol:[2,2,1,""],getInitialWeights:[2,2,1,""],getLabelCol:[2,2,1,""],getLayers:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getSolver:[2,2,1,""],getStepSize:[2,2,1,""],getTol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],initialWeights:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],layers:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setBlockSize:[2,2,1,""],setFeaturesCol:
 [2,2,1,""],setInitialWeights:[2,2,1,""],setLabelCol:[2,2,1,""],setLayers:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setSolver:[2,2,1,""],setStepSize:[2,2,1,""],setTol:[2,2,1,""],solver:[2,3,1,""],stepSize:[2,3,1,""],tol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.NaiveBayes":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getFeaturesCol:[2,2,1,""],getLabelCol:[2,2,1,""],getModelType:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getSmoothing:[2,2,1,""],getThresholds:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],modelType:[2,3,1,""],params:[2,3,1,""],pred
 ictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setLabelCol:[2,2,1,""],setModelType:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setSmoothing:[2,2,1,""],setThresholds:[2,2,1,""],setWeightCol:[2,2,1,""],smoothing:[2,3,1,""],thresholds:[2,3,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.NaiveBayesModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],pi:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],theta:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.classification.OneVsRest":{classifier:[2,3,1,""],copy:[2,2,1,""],expl
 ainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getClassifier:[2,2,1,""],getFeaturesCol:[2,2,1,""],getLabelCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParallelism:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],parallelism:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setClassifier:[2,2,1,""],setFeaturesCol:[2,2,1,""],setLabelCol:[2,2,1,""],setParallelism:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setWeightCol:[2,2,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.OneVsRestModel":{classifier:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],getClassifier:[2,2,1,""],getFeaturesCol:[2,2,1
 ,""],getLabelCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setClassifier:[2,2,1,""],setFeaturesCol:[2,2,1,""],setLabelCol:[2,2,1,""],setPredictionCol:[2,2,1,""],setWeightCol:[2,2,1,""],transform:[2,2,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.RandomForestClassificationModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureImportances:[2,3,1,""],getNumTrees:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numClasses:[2,3,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toDebugString:[2,3,1,""],
 totalNumNodes:[2,3,1,""],transform:[2,2,1,""],treeWeights:[2,3,1,""],trees:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.classification.RandomForestClassifier":{cacheNodeIds:[2,3,1,""],checkpointInterval:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureSubsetStrategy:[2,3,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeatureSubsetStrategy:[2,2,1,""],getFeaturesCol:[2,2,1,""],getImpurity:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getNumTrees:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getRawPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getSubsamplingRate:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],impurity:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,
 ""],labelCol:[2,3,1,""],load:[2,2,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],numTrees:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeatureSubsetStrategy:[2,2,1,""],setFeaturesCol:[2,2,1,""],setImpurity:[2,2,1,""],setLabelCol:[2,2,1,""],setMaxBins:[2,2,1,""],setMaxDepth:[2,2,1,""],setMaxMemoryInMB:[2,2,1,""],setMinInfoGain:[2,2,1,""],setMinInstancesPerNode:[2,2,1,""],setNumTrees:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setRawPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setSubsamplingRate:[2,2,1,""],subsamplingRate:[2,3,1,""],supportedFeatureSubsetStrategies:[2,3,1,""],supportedImpurities:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.clustering":{BisectingKMeans:[2,1,1,""],Bisec
 tingKMeansModel:[2,1,1,""],BisectingKMeansSummary:[2,1,1,""],DistributedLDAModel:[2,1,1,""],GaussianMixture:[2,1,1,""],GaussianMixtureModel:[2,1,1,""],GaussianMixtureSummary:[2,1,1,""],KMeans:[2,1,1,""],KMeansModel:[2,1,1,""],LDA:[2,1,1,""],LDAModel:[2,1,1,""],LocalLDAModel:[2,1,1,""]},"pyspark.ml.clustering.BisectingKMeans":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getFeaturesCol:[2,2,1,""],getK:[2,2,1,""],getMaxIter:[2,2,1,""],getMinDivisibleClusterSize:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],k:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],minDivisibleClusterSize:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setK:[2,2,1,""],setMaxIter:[
 2,2,1,""],setMinDivisibleClusterSize:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.BisectingKMeansModel":{clusterCenters:[2,2,1,""],computeCost:[2,2,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.BisectingKMeansSummary":{cluster:[2,3,1,""],clusterSizes:[2,3,1,""],featuresCol:[2,3,1,""],k:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""]},"pyspark.ml.clustering.DistributedLDAModel":{copy:[2,2,1,""],describeTopics:[2,2,1,""],estimatedDocConcentration:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getCheckpointFi
 les:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isDistributed:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],logLikelihood:[2,2,1,""],logPerplexity:[2,2,1,""],logPrior:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toLocal:[2,2,1,""],topicsMatrix:[2,2,1,""],trainingLogLikelihood:[2,2,1,""],transform:[2,2,1,""],vocabSize:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.GaussianMixture":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getFeaturesCol:[2,2,1,""],getK:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getProbabilityCol:[2,2,1,""],getSeed:[2,2,1,""],getTol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],k:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],pred
 ictionCol:[2,3,1,""],probabilityCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setK:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setProbabilityCol:[2,2,1,""],setSeed:[2,2,1,""],setTol:[2,2,1,""],tol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.GaussianMixtureModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],gaussiansDF:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],weights:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.GaussianMixtureSummary":{cluster:[2,3,1,""],clusterSizes:[2,3,1,""],featuresCol:[2,3,1,""],k:[2,3,1,""],logLikelihood:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],probabilit
 y:[2,3,1,""],probabilityCol:[2,3,1,""]},"pyspark.ml.clustering.KMeans":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getFeaturesCol:[2,2,1,""],getInitMode:[2,2,1,""],getInitSteps:[2,2,1,""],getK:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getTol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],initMode:[2,3,1,""],initSteps:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],k:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setInitMode:[2,2,1,""],setInitSteps:[2,2,1,""],setK:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setTol:[2,2,1,""],tol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.KMeansModel":{clusterCen
 ters:[2,2,1,""],computeCost:[2,2,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.LDA":{checkpointInterval:[2,3,1,""],copy:[2,2,1,""],docConcentration:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getDocConcentration:[2,2,1,""],getFeaturesCol:[2,2,1,""],getK:[2,2,1,""],getKeepLastCheckpoint:[2,2,1,""],getLearningDecay:[2,2,1,""],getLearningOffset:[2,2,1,""],getMaxIter:[2,2,1,""],getOptimizeDocConcentration:[2,2,1,""],getOptimizer:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getSeed:[2,2,1,"
 "],getSubsamplingRate:[2,2,1,""],getTopicConcentration:[2,2,1,""],getTopicDistributionCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],k:[2,3,1,""],keepLastCheckpoint:[2,3,1,""],learningDecay:[2,3,1,""],learningOffset:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],optimizeDocConcentration:[2,3,1,""],optimizer:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setDocConcentration:[2,2,1,""],setFeaturesCol:[2,2,1,""],setK:[2,2,1,""],setKeepLastCheckpoint:[2,2,1,""],setLearningDecay:[2,2,1,""],setLearningOffset:[2,2,1,""],setMaxIter:[2,2,1,""],setOptimizeDocConcentration:[2,2,1,""],setOptimizer:[2,2,1,""],setParams:[2,2,1,""],setSeed:[2,2,1,""],setSubsamplingRate:[2,2,1,""],setTopicConcentration:[2,2,1,""],setTopicDistributionCol:[2,2,1,""],subsamplingRate:[2,3,1,""],topicConcentration:[2,3,1,""],topicDistributionCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.clustering.
 LDAModel":{copy:[2,2,1,""],describeTopics:[2,2,1,""],estimatedDocConcentration:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isDistributed:[2,2,1,""],isSet:[2,2,1,""],logLikelihood:[2,2,1,""],logPerplexity:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""],topicsMatrix:[2,2,1,""],transform:[2,2,1,""],vocabSize:[2,2,1,""]},"pyspark.ml.clustering.LocalLDAModel":{copy:[2,2,1,""],describeTopics:[2,2,1,""],estimatedDocConcentration:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isDistributed:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],logLikelihood:[2,2,1,""],logPerplexity:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],topicsMatrix:[2,2,1,""],transform:[2,2,1,""],vocabSize:[2,
 2,1,""],write:[2,2,1,""]},"pyspark.ml.evaluation":{BinaryClassificationEvaluator:[2,1,1,""],ClusteringEvaluator:[2,1,1,""],Evaluator:[2,1,1,""],MulticlassClassificationEvaluator:[2,1,1,""],RegressionEvaluator:[2,1,1,""]},"pyspark.ml.evaluation.BinaryClassificationEvaluator":{copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getLabelCol:[2,2,1,""],getMetricName:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getRawPredictionCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isLargerBetter:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],metricName:[2,3,1,""],params:[2,3,1,""],rawPredictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setLabelCol:[2,2,1,""],setMetricName:[2,2,1,""],setParams:[2,2,1,""],setRawPredictionCol:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.evaluation.ClusteringEvaluator":{copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explai
 nParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],getFeaturesCol:[2,2,1,""],getMetricName:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isLargerBetter:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],metricName:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setMetricName:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.evaluation.Evaluator":{copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isLargerBetter:[2,2,1,""],isSet:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""]},"pyspark.ml.evaluation.MulticlassClassificationEvaluator":{copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],exp
 lainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getLabelCol:[2,2,1,""],getMetricName:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isLargerBetter:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],metricName:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setLabelCol:[2,2,1,""],setMetricName:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.evaluation.RegressionEvaluator":{copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getLabelCol:[2,2,1,""],getMetricName:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isLargerBetter:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],metricName:[2,3,1,""],params:[2,3,1,""],
 predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setLabelCol:[2,2,1,""],setMetricName:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature":{Binarizer:[2,1,1,""],BucketedRandomProjectionLSH:[2,1,1,""],BucketedRandomProjectionLSHModel:[2,1,1,""],Bucketizer:[2,1,1,""],ChiSqSelector:[2,1,1,""],ChiSqSelectorModel:[2,1,1,""],CountVectorizer:[2,1,1,""],CountVectorizerModel:[2,1,1,""],DCT:[2,1,1,""],ElementwiseProduct:[2,1,1,""],FeatureHasher:[2,1,1,""],HashingTF:[2,1,1,""],IDF:[2,1,1,""],IDFModel:[2,1,1,""],Imputer:[2,1,1,""],ImputerModel:[2,1,1,""],IndexToString:[2,1,1,""],MaxAbsScaler:[2,1,1,""],MaxAbsScalerModel:[2,1,1,""],MinHashLSH:[2,1,1,""],MinHashLSHModel:[2,1,1,""],MinMaxScaler:[2,1,1,""],MinMaxScalerModel:[2,1,1,""],NGram:[2,1,1,""],Normalizer:[2,1,1,""],OneHotEncoder:[2,1,1,""],OneHotEncoderEstimator:[2,1,1,""],OneHotEncoderModel:[2,1,1,""],PCA:[2,1,1,""],PCAModel:[2,1,1,""],PolynomialExpansion:[2,1,1,""],Quant
 ileDiscretizer:[2,1,1,""],RFormula:[2,1,1,""],RFormulaModel:[2,1,1,""],RegexTokenizer:[2,1,1,""],SQLTransformer:[2,1,1,""],StandardScaler:[2,1,1,""],StandardScalerModel:[2,1,1,""],StopWordsRemover:[2,1,1,""],StringIndexer:[2,1,1,""],StringIndexerModel:[2,1,1,""],Tokenizer:[2,1,1,""],VectorAssembler:[2,1,1,""],VectorIndexer:[2,1,1,""],VectorIndexerModel:[2,1,1,""],VectorSizeHint:[2,1,1,""],VectorSlicer:[2,1,1,""],Word2Vec:[2,1,1,""],Word2VecModel:[2,1,1,""]},"pyspark.ml.feature.Binarizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getThreshold:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setThreshold:[2,2,1,""],th
 reshold:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.BucketedRandomProjectionLSH":{bucketLength:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getBucketLength:[2,2,1,""],getInputCol:[2,2,1,""],getNumHashTables:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getSeed:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numHashTables:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setBucketLength:[2,2,1,""],setInputCol:[2,2,1,""],setNumHashTables:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setSeed:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.BucketedRandomProjectionLSHModel":{approxNearestNeighbors:[2,2,1,""],approxSimilarityJoin:[2,2,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explai
 nParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Bucketizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getSplits:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setHandleInvalid:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setSplits:[2,2,1,""],splits:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.ChiSqSelector":{copy:
 [2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fdr:[2,3,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],fpr:[2,3,1,""],fwe:[2,3,1,""],getFdr:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFpr:[2,2,1,""],getFwe:[2,2,1,""],getLabelCol:[2,2,1,""],getNumTopFeatures:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getPercentile:[2,2,1,""],getSelectorType:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],numTopFeatures:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],percentile:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],selectorType:[2,3,1,""],set:[2,2,1,""],setFdr:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFpr:[2,2,1,""],setFwe:[2,2,1,""],setLabelCol:[2,2,1,""],setNumTopFeatures:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setPercentile:[2,2,1,""],setSelectorType:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.ChiSqSelect
 orModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],selectedFeatures:[2,3,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.CountVectorizer":{binary:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getBinary:[2,2,1,""],getInputCol:[2,2,1,""],getMinDF:[2,2,1,""],getMinTF:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getVocabSize:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],minDF:[2,3,1,""],minTF:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setBinary:[2,2,1,""],setI
 nputCol:[2,2,1,""],setMinDF:[2,2,1,""],setMinTF:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setVocabSize:[2,2,1,""],vocabSize:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.CountVectorizerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],vocabulary:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.DCT":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getInverse:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],inverse:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,
 2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setInverse:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.ElementwiseProduct":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getScalingVec:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],scalingVec:[2,3,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setScalingVec:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.FeatureHasher":{categoricalCols:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getCategoricalCols:[2,2,1,""],getInputCols:[2,2,1,""],getNumFeatures:[2,2,1,""],getOr
 Default:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCols:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setCategoricalCols:[2,2,1,""],setInputCols:[2,2,1,""],setNumFeatures:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.HashingTF":{binary:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getBinary:[2,2,1,""],getInputCol:[2,2,1,""],getNumFeatures:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setBinary:[2,2,1,""],setInputCol:[2,2,1
 ,""],setNumFeatures:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.IDF":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getMinDocFreq:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],minDocFreq:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setMinDocFreq:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.IDFModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],idf:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,"
 "],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Imputer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCols:[2,2,1,""],getMissingValue:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCols:[2,2,1,""],getParam:[2,2,1,""],getStrategy:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCols:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],missingValue:[2,3,1,""],outputCols:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCols:[2,2,1,""],setMissingValue:[2,2,1,""],setOutputCols:[2,2,1,""],setParams:[2,2,1,""],setStrategy:[2,2,1,""],strategy:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.ImputerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam
 :[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],surrogateDF:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.IndexToString":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getLabels:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labels:[2,3,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setLabels:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MaxAbsScaler":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getO
 utputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MaxAbsScalerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],maxAbs:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MinHashLSH":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getNumHashTables:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2
 ,1,""],getSeed:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numHashTables:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setNumHashTables:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setSeed:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MinHashLSHModel":{approxNearestNeighbors:[2,2,1,""],approxSimilarityJoin:[2,2,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MinMaxScaler":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[
 2,2,1,""],getInputCol:[2,2,1,""],getMax:[2,2,1,""],getMin:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],max:[2,3,1,""],min:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setMax:[2,2,1,""],setMin:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.MinMaxScalerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],originalMax:[2,3,1,""],originalMin:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.NGram":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2
 ,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getN:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],n:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setN:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Normalizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getP:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],p:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setP:[2,2,1,""],setPar
 ams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.OneHotEncoder":{copy:[2,2,1,""],dropLast:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getDropLast:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setDropLast:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.OneHotEncoderEstimator":{copy:[2,2,1,""],dropLast:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getDropLast:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCols:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCols:[2,2,1,""],getParam:[2,2,1,""],handleInva
 lid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCols:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCols:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setDropLast:[2,2,1,""],setHandleInvalid:[2,2,1,""],setInputCols:[2,2,1,""],setOutputCols:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.OneHotEncoderModel":{categorySizes:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.PCA":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getK:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,
 1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],k:[2,3,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setK:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.PCAModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],explainedVariance:[2,3,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],pc:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.PolynomialExpansion":{copy:[2,2,1,""],degree:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getDegree:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],
 getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setDegree:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.QuantileDiscretizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCol:[2,2,1,""],getNumBuckets:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getRelativeError:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numBuckets:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],relativeError:[2,3,1,""],save:[2,2,1,""],set:[2,2,1,""],setHandleInvalid:[2,2,1,""],s
 etInputCol:[2,2,1,""],setNumBuckets:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setRelativeError:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.RFormula":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],forceIndexLabel:[2,3,1,""],formula:[2,3,1,""],getFeaturesCol:[2,2,1,""],getForceIndexLabel:[2,2,1,""],getFormula:[2,2,1,""],getHandleInvalid:[2,2,1,""],getLabelCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getStringIndexerOrderType:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setFeaturesCol:[2,2,1,""],setForceIndexLabel:[2,2,1,""],setFormula:[2,2,1,""],setHandleInvalid:[2,2,1,""],setLabelCol:[2,2,1,""],setParams:[2,2,1,""],setStringIndexerOrderType:[2,2,1,""],stringIndexerOrderType:[2,
 3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.RFormulaModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.RegexTokenizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],gaps:[2,3,1,""],getGaps:[2,2,1,""],getInputCol:[2,2,1,""],getMinTokenLength:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getPattern:[2,2,1,""],getToLowercase:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],minTokenLength:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],pattern:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setGaps:[2,2,1,""],s
 etInputCol:[2,2,1,""],setMinTokenLength:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setPattern:[2,2,1,""],setToLowercase:[2,2,1,""],toLowercase:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.SQLTransformer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getStatement:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setParams:[2,2,1,""],setStatement:[2,2,1,""],statement:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.StandardScaler":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getWithMean:[2,2,1,""],getWithStd:[2,2,1,""],hasDefault:[2,2,1,""],has
 Param:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setWithMean:[2,2,1,""],setWithStd:[2,2,1,""],withMean:[2,3,1,""],withStd:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.StandardScalerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],mean:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],std:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.StopWordsRemover":{caseSensitive:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getCaseSensitive:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputC
 ol:[2,2,1,""],getParam:[2,2,1,""],getStopWords:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],loadDefaultStopWords:[2,5,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setCaseSensitive:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setStopWords:[2,2,1,""],stopWords:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.StringIndexer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getStringOrderType:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""
 ],set:[2,2,1,""],setHandleInvalid:[2,2,1,""],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setStringOrderType:[2,2,1,""],stringOrderType:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.StringIndexerModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labels:[2,3,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Tokenizer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,"
 "],setInputCol:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.VectorAssembler":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getInputCols:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCols:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setInputCols:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.VectorIndexer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCol:[2,2,1,""],getMaxCategories:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,
 1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],maxCategories:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setHandleInvalid:[2,2,1,""],setInputCol:[2,2,1,""],setMaxCategories:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.VectorIndexerModel":{categoryMaps:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.VectorSizeHint":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getHandleInvalid:[2,2,1,""],getInputCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,
 ""],getSize:[2,2,1,""],handleInvalid:[2,3,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setHandleInvalid:[2,2,1,""],setInputCol:[2,2,1,""],setParams:[2,2,1,""],setSize:[2,2,1,""],size:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.feature.VectorSlicer":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getIndices:[2,2,1,""],getInputCol:[2,2,1,""],getNames:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],indices:[2,3,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],names:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setIndices:[2,2,1,""],setInputCol:[2,2,1,""],setNames:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],transform:[2,2
 ,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Word2Vec":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getInputCol:[2,2,1,""],getMaxIter:[2,2,1,""],getMaxSentenceLength:[2,2,1,""],getMinCount:[2,2,1,""],getNumPartitions:[2,2,1,""],getOrDefault:[2,2,1,""],getOutputCol:[2,2,1,""],getParam:[2,2,1,""],getSeed:[2,2,1,""],getStepSize:[2,2,1,""],getVectorSize:[2,2,1,""],getWindowSize:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],inputCol:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],maxSentenceLength:[2,3,1,""],minCount:[2,3,1,""],numPartitions:[2,3,1,""],outputCol:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setInputCol:[2,2,1,""],setMaxIter:[2,2,1,""],setMaxSentenceLength:[2,2,1,""],setMinCount:[2,2,1,""],setNumPartitions:[2,2,1,""],setOutputCol:[2,2,1,""],setParams:[2,2,1,""],setSeed:[2,2,1,""],setStepSize:[2,2,1,"
 "],setVectorSize:[2,2,1,""],setWindowSize:[2,2,1,""],stepSize:[2,3,1,""],vectorSize:[2,3,1,""],windowSize:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.feature.Word2VecModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],findSynonyms:[2,2,1,""],findSynonymsArray:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getVectors:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.fpm":{FPGrowth:[2,1,1,""],FPGrowthModel:[2,1,1,""]},"pyspark.ml.fpm.FPGrowth":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getItemsCol:[2,2,1,""],getMinConfidence:[2,2,1,""],getMinSupport:[2,2,1,""],getNumPartitions:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],hasDefault:[2,2,1,
 ""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],itemsCol:[2,3,1,""],load:[2,2,1,""],minConfidence:[2,3,1,""],minSupport:[2,3,1,""],numPartitions:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setItemsCol:[2,2,1,""],setMinConfidence:[2,2,1,""],setMinSupport:[2,2,1,""],setNumPartitions:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.fpm.FPGrowthModel":{associationRules:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],freqItemsets:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.image":{ImageSchema:[2,3,1,""],_ImageSchema:[2,1,1,""]},"pyspark.ml.image._ImageSchema":{imageFields:[2,3,1,""],imageSchema:[2,3,1,""]
 ,ocvTypes:[2,3,1,""],readImages:[2,2,1,""],toImage:[2,2,1,""],toNDArray:[2,2,1,""],undefinedImageType:[2,3,1,""]},"pyspark.ml.linalg":{DenseMatrix:[2,1,1,""],DenseVector:[2,1,1,""],Matrices:[2,1,1,""],Matrix:[2,1,1,""],SparseMatrix:[2,1,1,""],SparseVector:[2,1,1,""],Vector:[2,1,1,""],Vectors:[2,1,1,""]},"pyspark.ml.linalg.DenseMatrix":{toArray:[2,2,1,""],toSparse:[2,2,1,""]},"pyspark.ml.linalg.DenseVector":{dot:[2,2,1,""],norm:[2,2,1,""],numNonzeros:[2,2,1,""],squared_distance:[2,2,1,""],toArray:[2,2,1,""],values:[2,3,1,""]},"pyspark.ml.linalg.Matrices":{dense:[2,5,1,""],sparse:[2,5,1,""]},"pyspark.ml.linalg.Matrix":{toArray:[2,2,1,""]},"pyspark.ml.linalg.SparseMatrix":{toArray:[2,2,1,""],toDense:[2,2,1,""]},"pyspark.ml.linalg.SparseVector":{dot:[2,2,1,""],indices:[2,3,1,""],norm:[2,2,1,""],numNonzeros:[2,2,1,""],size:[2,3,1,""],squared_distance:[2,2,1,""],toArray:[2,2,1,""],values:[2,3,1,""]},"pyspark.ml.linalg.Vector":{toArray:[2,2,1,""]},"pyspark.ml.linalg.Vectors":{dense:[2,5,1,
 ""],norm:[2,5,1,""],sparse:[2,5,1,""],squared_distance:[2,5,1,""],zeros:[2,5,1,""]},"pyspark.ml.param":{Param:[2,1,1,""],Params:[2,1,1,""],TypeConverters:[2,1,1,""]},"pyspark.ml.param.Params":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],params:[2,3,1,""],set:[2,2,1,""]},"pyspark.ml.param.TypeConverters":{identity:[2,5,1,""],toBoolean:[2,5,1,""],toFloat:[2,5,1,""],toInt:[2,5,1,""],toList:[2,5,1,""],toListFloat:[2,5,1,""],toListInt:[2,5,1,""],toListString:[2,5,1,""],toMatrix:[2,5,1,""],toString:[2,5,1,""],toVector:[2,5,1,""]},"pyspark.ml.recommendation":{ALS:[2,1,1,""],ALSModel:[2,1,1,""]},"pyspark.ml.recommendation.ALS":{alpha:[2,3,1,""],checkpointInterval:[2,3,1,""],coldStartStrategy:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],finalStorageLevel:[2,3,1,""],
 fit:[2,2,1,""],fitMultiple:[2,2,1,""],getAlpha:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getColdStartStrategy:[2,2,1,""],getFinalStorageLevel:[2,2,1,""],getImplicitPrefs:[2,2,1,""],getIntermediateStorageLevel:[2,2,1,""],getItemCol:[2,2,1,""],getMaxIter:[2,2,1,""],getNonnegative:[2,2,1,""],getNumItemBlocks:[2,2,1,""],getNumUserBlocks:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getRank:[2,2,1,""],getRatingCol:[2,2,1,""],getRegParam:[2,2,1,""],getSeed:[2,2,1,""],getUserCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],implicitPrefs:[2,3,1,""],intermediateStorageLevel:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],itemCol:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],nonnegative:[2,3,1,""],numItemBlocks:[2,3,1,""],numUserBlocks:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],rank:[2,3,1,""],ratingCol:[2,3,1,""],read:[2,2,1,""],regParam:[2,3,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setAlpha:[2,2,1,""],setCheckpointInterv
 al:[2,2,1,""],setColdStartStrategy:[2,2,1,""],setFinalStorageLevel:[2,2,1,""],setImplicitPrefs:[2,2,1,""],setIntermediateStorageLevel:[2,2,1,""],setItemCol:[2,2,1,""],setMaxIter:[2,2,1,""],setNonnegative:[2,2,1,""],setNumBlocks:[2,2,1,""],setNumItemBlocks:[2,2,1,""],setNumUserBlocks:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setRank:[2,2,1,""],setRatingCol:[2,2,1,""],setRegParam:[2,2,1,""],setSeed:[2,2,1,""],setUserCol:[2,2,1,""],userCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.recommendation.ALSModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],itemFactors:[2,3,1,""],load:[2,2,1,""],params:[2,3,1,""],rank:[2,3,1,""],read:[2,2,1,""],recommendForAllItems:[2,2,1,""],recommendForAllUsers:[2,2,1,""],recommendForItemSubset:[2,2,1,""],recommendForUserSubset:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],tra
 nsform:[2,2,1,""],userFactors:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression":{AFTSurvivalRegression:[2,1,1,""],AFTSurvivalRegressionModel:[2,1,1,""],DecisionTreeRegressionModel:[2,1,1,""],DecisionTreeRegressor:[2,1,1,""],GBTRegressionModel:[2,1,1,""],GBTRegressor:[2,1,1,""],GeneralizedLinearRegression:[2,1,1,""],GeneralizedLinearRegressionModel:[2,1,1,""],GeneralizedLinearRegressionSummary:[2,1,1,""],GeneralizedLinearRegressionTrainingSummary:[2,1,1,""],IsotonicRegression:[2,1,1,""],IsotonicRegressionModel:[2,1,1,""],LinearRegression:[2,1,1,""],LinearRegressionModel:[2,1,1,""],LinearRegressionSummary:[2,1,1,""],LinearRegressionTrainingSummary:[2,1,1,""],RandomForestRegressionModel:[2,1,1,""],RandomForestRegressor:[2,1,1,""]},"pyspark.ml.regression.AFTSurvivalRegression":{aggregationDepth:[2,3,1,""],censorCol:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitIntercept:[2,3,1,""],fitMul
 tiple:[2,2,1,""],getAggregationDepth:[2,2,1,""],getCensorCol:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFitIntercept:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getQuantileProbabilities:[2,2,1,""],getQuantilesCol:[2,2,1,""],getTol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],quantileProbabilities:[2,3,1,""],quantilesCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],setAggregationDepth:[2,2,1,""],setCensorCol:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFitIntercept:[2,2,1,""],setLabelCol:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setQuantileProbabilities:[2,2,1,""],setQuantilesCol:[2,2,1,""],setTol:[2,2,1,""],tol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.AFTSurvivalRegressionModel":{coefficients:[2,3,1,""],
 copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],intercept:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],predict:[2,2,1,""],predictQuantiles:[2,2,1,""],read:[2,2,1,""],save:[2,2,1,""],scale:[2,3,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.regression.DecisionTreeRegressionModel":{copy:[2,2,1,""],depth:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureImportances:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],numNodes:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toDebugString:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.regression.DecisionTreeRegressor":{cacheNodeIds:[2,3,1,""],c
 heckpointInterval:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeaturesCol:[2,2,1,""],getImpurity:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getVarianceCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],impurity:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeaturesCo
 l:[2,2,1,""],setImpurity:[2,2,1,""],setLabelCol:[2,2,1,""],setMaxBins:[2,2,1,""],setMaxDepth:[2,2,1,""],setMaxMemoryInMB:[2,2,1,""],setMinInfoGain:[2,2,1,""],setMinInstancesPerNode:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setVarianceCol:[2,2,1,""],supportedImpurities:[2,3,1,""],varianceCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.GBTRegressionModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureImportances:[2,3,1,""],getNumTrees:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],toDebugString:[2,3,1,""],totalNumNodes:[2,3,1,""],transform:[2,2,1,""],treeWeights:[2,3,1,""],trees:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.GBTRegressor":{cacheNodeIds:[2,3,1,""],checkpointInterval:[2,3,1,""],c
 opy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeaturesCol:[2,2,1,""],getImpurity:[2,2,1,""],getLabelCol:[2,2,1,""],getLossType:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxIter:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getStepSize:[2,2,1,""],getSubsamplingRate:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],impurity:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],lossType:[2,3,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxIter:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,
 2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeaturesCol:[2,2,1,""],setImpurity:[2,2,1,""],setLabelCol:[2,2,1,""],setLossType:[2,2,1,""],setMaxBins:[2,2,1,""],setMaxDepth:[2,2,1,""],setMaxIter:[2,2,1,""],setMaxMemoryInMB:[2,2,1,""],setMinInfoGain:[2,2,1,""],setMinInstancesPerNode:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setSeed:[2,2,1,""],setStepSize:[2,2,1,""],setSubsamplingRate:[2,2,1,""],stepSize:[2,3,1,""],subsamplingRate:[2,3,1,""],supportedImpurities:[2,3,1,""],supportedLossTypes:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.GeneralizedLinearRegression":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],family:[2,3,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitIntercept:[2,3,1,""],fitMultiple:[2,2,1,""],getFamily:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFitIntercept:[2,2,1,""],getLabelCol:[2,2,1,""],getLink:[2,2,1,""],getLinkPower:[2,2,1,""],getLinkPredictionCol:[2,2,1,""],getMaxIter:[
 2,2,1,""],getOffsetCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getRegParam:[2,2,1,""],getSolver:[2,2,1,""],getTol:[2,2,1,""],getVariancePower:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],link:[2,3,1,""],linkPower:[2,3,1,""],linkPredictionCol:[2,3,1,""],load:[2,2,1,""],maxIter:[2,3,1,""],offsetCol:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],regParam:[2,3,1,""],save:[2,2,1,""],set:[2,2,1,""],setFamily:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFitIntercept:[2,2,1,""],setLabelCol:[2,2,1,""],setLink:[2,2,1,""],setLinkPower:[2,2,1,""],setLinkPredictionCol:[2,2,1,""],setMaxIter:[2,2,1,""],setOffsetCol:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setRegParam:[2,2,1,""],setSolver:[2,2,1,""],setTol:[2,2,1,""],setVariancePower:[2,2,1,""],setWeightCol:[2,2,1,""],solver:[2,3,1,""],tol:[2,3,1,""],variancePower:[2,3,1,""],weight
 Col:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.GeneralizedLinearRegressionModel":{coefficients:[2,3,1,""],copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],intercept:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.regression.GeneralizedLinearRegressionSummary":{aic:[2,3,1,""],degreesOfFreedom:[2,3,1,""],deviance:[2,3,1,""],dispersion:[2,3,1,""],nullDeviance:[2,3,1,""],numInstances:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],rank:[2,3,1,""],residualDegreeOfFreedom:[2,3,1,""],residualDegreeOfFreedomNull:[2,3,1,""],residuals:[2,2,1,""]},"pyspark.ml.regression.GeneralizedLinearRegressionTrainingSummary":{aic:[2,3,1,""],coeff
 icientStandardErrors:[2,3,1,""],degreesOfFreedom:[2,3,1,""],deviance:[2,3,1,""],dispersion:[2,3,1,""],nullDeviance:[2,3,1,""],numInstances:[2,3,1,""],numIterations:[2,3,1,""],pValues:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],rank:[2,3,1,""],residualDegreeOfFreedom:[2,3,1,""],residualDegreeOfFreedomNull:[2,3,1,""],residuals:[2,2,1,""],solver:[2,3,1,""],tValues:[2,3,1,""]},"pyspark.ml.regression.IsotonicRegression":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureIndex:[2,3,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getFeatureIndex:[2,2,1,""],getFeaturesCol:[2,2,1,""],getIsotonic:[2,2,1,""],getLabelCol:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],isotonic:[2,3,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1
 ,""],save:[2,2,1,""],set:[2,2,1,""],setFeatureIndex:[2,2,1,""],setFeaturesCol:[2,2,1,""],setIsotonic:[2,2,1,""],setLabelCol:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setWeightCol:[2,2,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.IsotonicRegressionModel":{boundaries:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],params:[2,3,1,""],predictions:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],set:[2,2,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.regression.LinearRegression":{aggregationDepth:[2,3,1,""],copy:[2,2,1,""],elasticNetParam:[2,3,1,""],epsilon:[2,3,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitIntercept:[2,3,1,""],fitMultiple:[2,2,1,""],getAggregationDepth:[2,2,1,
 ""],getElasticNetParam:[2,2,1,""],getEpsilon:[2,2,1,""],getFeaturesCol:[2,2,1,""],getFitIntercept:[2,2,1,""],getLabelCol:[2,2,1,""],getLoss:[2,2,1,""],getMaxIter:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getRegParam:[2,2,1,""],getSolver:[2,2,1,""],getStandardization:[2,2,1,""],getTol:[2,2,1,""],getWeightCol:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],load:[2,2,1,""],loss:[2,3,1,""],maxIter:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],regParam:[2,3,1,""],save:[2,2,1,""],set:[2,2,1,""],setAggregationDepth:[2,2,1,""],setElasticNetParam:[2,2,1,""],setEpsilon:[2,2,1,""],setFeaturesCol:[2,2,1,""],setFitIntercept:[2,2,1,""],setLabelCol:[2,2,1,""],setLoss:[2,2,1,""],setMaxIter:[2,2,1,""],setParams:[2,2,1,""],setPredictionCol:[2,2,1,""],setRegParam:[2,2,1,""],setSolver:[2,2,1,""],setStandardization:[2,2,1,""],setTol:[2,2,1,""],setWeightCol:[2,2,1,""],solver:[2,
 3,1,""],standardization:[2,3,1,""],tol:[2,3,1,""],weightCol:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.LinearRegressionModel":{coefficients:[2,3,1,""],copy:[2,2,1,""],evaluate:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],hasSummary:[2,3,1,""],intercept:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],scale:[2,3,1,""],set:[2,2,1,""],summary:[2,3,1,""],transform:[2,2,1,""],write:[2,2,1,""]},"pyspark.ml.regression.LinearRegressionSummary":{coefficientStandardErrors:[2,3,1,""],degreesOfFreedom:[2,3,1,""],devianceResiduals:[2,3,1,""],explainedVariance:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],meanAbsoluteError:[2,3,1,""],meanSquaredError:[2,3,1,""],numInstances:[2,3,1,""],pValues:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],r2:[2,3,1,""],re
 siduals:[2,3,1,""],rootMeanSquaredError:[2,3,1,""],tValues:[2,3,1,""]},"pyspark.ml.regression.LinearRegressionTrainingSummary":{coefficientStandardErrors:[2,3,1,""],degreesOfFreedom:[2,3,1,""],devianceResiduals:[2,3,1,""],explainedVariance:[2,3,1,""],featuresCol:[2,3,1,""],labelCol:[2,3,1,""],meanAbsoluteError:[2,3,1,""],meanSquaredError:[2,3,1,""],numInstances:[2,3,1,""],objectiveHistory:[2,3,1,""],pValues:[2,3,1,""],predictionCol:[2,3,1,""],predictions:[2,3,1,""],r2:[2,3,1,""],residuals:[2,3,1,""],rootMeanSquaredError:[2,3,1,""],tValues:[2,3,1,""],totalIterations:[2,3,1,""]},"pyspark.ml.regression.RandomForestRegressionModel":{copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureImportances:[2,3,1,""],getNumTrees:[2,3,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],load:[2,2,1,""],numFeatures:[2,3,1,""],params:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],se
 t:[2,2,1,""],toDebugString:[2,3,1,""],totalNumNodes:[2,3,1,""],transform:[2,2,1,""],treeWeights:[2,3,1,""],trees:[2,3,1,""],write:[2,2,1,""]},"pyspark.ml.regression.RandomForestRegressor":{cacheNodeIds:[2,3,1,""],checkpointInterval:[2,3,1,""],copy:[2,2,1,""],explainParam:[2,2,1,""],explainParams:[2,2,1,""],extractParamMap:[2,2,1,""],featureSubsetStrategy:[2,3,1,""],featuresCol:[2,3,1,""],fit:[2,2,1,""],fitMultiple:[2,2,1,""],getCacheNodeIds:[2,2,1,""],getCheckpointInterval:[2,2,1,""],getFeatureSubsetStrategy:[2,2,1,""],getFeaturesCol:[2,2,1,""],getImpurity:[2,2,1,""],getLabelCol:[2,2,1,""],getMaxBins:[2,2,1,""],getMaxDepth:[2,2,1,""],getMaxMemoryInMB:[2,2,1,""],getMinInfoGain:[2,2,1,""],getMinInstancesPerNode:[2,2,1,""],getNumTrees:[2,2,1,""],getOrDefault:[2,2,1,""],getParam:[2,2,1,""],getPredictionCol:[2,2,1,""],getSeed:[2,2,1,""],getSubsamplingRate:[2,2,1,""],hasDefault:[2,2,1,""],hasParam:[2,2,1,""],impurity:[2,3,1,""],isDefined:[2,2,1,""],isSet:[2,2,1,""],labelCol:[2,3,1,""],loa
 d:[2,2,1,""],maxBins:[2,3,1,""],maxDepth:[2,3,1,""],maxMemoryInMB:[2,3,1,""],minInfoGain:[2,3,1,""],minInstancesPerNode:[2,3,1,""],numTrees:[2,3,1,""],params:[2,3,1,""],predictionCol:[2,3,1,""],read:[2,2,1,""],save:[2,2,1,""],seed:[2,3,1,""],set:[2,2,1,""],setCacheNodeIds:[2,2,1,""],setCheckpointInterval:[2,2,1,""],setFeatureSubsetStra

<TRUNCATED>

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


[2/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/pyspark.streaming.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/pyspark.streaming.html b/site/docs/2.3.1/api/python/pyspark.streaming.html
index 7f1dee5..411799a 100644
--- a/site/docs/2.3.1/api/python/pyspark.streaming.html
+++ b/site/docs/2.3.1/api/python/pyspark.streaming.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming module &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming module &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -39,7 +39,7 @@
           <a href="pyspark.sql.html" title="pyspark.sql module"
              accesskey="P">previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" accesskey="U">pyspark package</a> &#187;</li> 
       </ul>
@@ -763,7 +763,8 @@ DStream’s batching interval</li>
 <dl class="class">
 <dt id="pyspark.streaming.StreamingListener.Java">
 <em class="property">class </em><code class="descname">Java</code><a class="reference internal" href="_modules/pyspark/streaming/listener.html#StreamingListener.Java"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.streaming.StreamingListener.Java" title="Permalink to this definition">¶</a></dt>
-<dd><dl class="attribute">
+<dd><p>Bases: <code class="xref py py-class docutils literal"><span class="pre">object</span></code></p>
+<dl class="attribute">
 <dt id="pyspark.streaming.StreamingListener.Java.implements">
 <code class="descname">implements</code><em class="property"> = ['org.apache.spark.streaming.api.java.PythonStreamingListener']</em><a class="headerlink" href="#pyspark.streaming.StreamingListener.Java.implements" title="Permalink to this definition">¶</a></dt>
 <dd></dd></dl>
@@ -1239,7 +1240,7 @@ See SPARK-22142.</p>
           <a href="pyspark.sql.html" title="pyspark.sql module"
              >previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" >pyspark package</a> &#187;</li> 
       </ul>

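As context for the StreamingListener.Java change above (the class page now documents its `object` base alongside the `implements` list naming the Java-side listener interface), a minimal sketch of registering a custom listener could look like the following; the listener name is invented and an existing SparkContext is assumed:

    from pyspark import SparkContext
    from pyspark.streaming import StreamingContext, StreamingListener

    class BatchLogger(StreamingListener):
        # StreamingListener defines no-op hooks; override only the ones you need.
        def onBatchCompleted(self, batchCompleted):
            print("a streaming batch just completed")

    sc = SparkContext.getOrCreate()
    ssc = StreamingContext(sc, 1)            # 1-second batch interval
    ssc.addStreamingListener(BatchLogger())
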
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/search.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/search.html b/site/docs/2.3.1/api/python/search.html
index 8b169e6..7757cfe 100644
--- a/site/docs/2.3.1/api/python/search.html
+++ b/site/docs/2.3.1/api/python/search.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>Search &#8212; PySpark master documentation</title>
+    <title>Search &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -39,7 +39,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>  
@@ -89,7 +89,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


[5/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/pyspark.ml.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/pyspark.ml.html b/site/docs/2.3.1/api/python/pyspark.ml.html
index 4ada723..986c949 100644
--- a/site/docs/2.3.1/api/python/pyspark.ml.html
+++ b/site/docs/2.3.1/api/python/pyspark.ml.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml package &#8212; PySpark master documentation</title>
+    <title>pyspark.ml package &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -39,7 +39,7 @@
           <a href="pyspark.streaming.html" title="pyspark.streaming module"
              accesskey="P">previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" accesskey="U">pyspark package</a> &#187;</li> 
       </ul>
@@ -718,7 +718,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.Pipeline">
-<em class="property">class </em><code class="descclassname">pyspark.ml.</code><code class="descname">Pipeline</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/pipeline.html#Pipeline"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.Pipeline" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.</code><code class="descname">Pipeline</code><span class="sig-paren">(</span><em>stages=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/pipeline.html#Pipeline"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.Pipeline" title="Permalink to this definition">¶</a></dt>
 <dd><p>A simple pipeline, which acts as an estimator. A Pipeline consists
 of a sequence of stages, each of which is either an
 <a class="reference internal" href="#pyspark.ml.Estimator" title="pyspark.ml.Estimator"><code class="xref py py-class docutils literal"><span class="pre">Estimator</span></code></a> or a <a class="reference internal" href="#pyspark.ml.Transformer" title="pyspark.ml.Transformer"><code class="xref py py-class docutils literal"><span class="pre">Transformer</span></code></a>. When
@@ -1360,7 +1360,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 <span id="pyspark-ml-feature-module"></span><h2>pyspark.ml.feature module<a class="headerlink" href="#module-pyspark.ml.feature" title="Permalink to this headline">¶</a></h2>
 <dl class="class">
 <dt id="pyspark.ml.feature.Binarizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Binarizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Binarizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Binarizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Binarizer</code><span class="sig-paren">(</span><em>threshold=0.0</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Binarizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Binarizer" title="Permalink to this definition">¶</a></dt>
 <dd><p>Binarize a column of continuous features given a threshold.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="mf">0.5</span><span class="p">,)],</span> <span class="p">[</span><span class="s2">&quot;values&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">binarizer</span> <span class="o">=</span> <span class="n">Binarizer</span><span class="p">(</span><span class="n">threshold</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">inputCol</span><span class="o">=</span><span class="s2">&quot;values&quot;</span><span class="p">,</span> <span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;features&quot;</span><span class="p">)</span>
@@ -1606,7 +1606,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.BucketedRandomProjectionLSH">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">BucketedRandomProjectionLSH</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#BucketedRandomProjectionLSH"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.BucketedRandomProjectionLSH" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">BucketedRandomProjectionLSH</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>seed=None</em>, <em>numHashTables=1</em>, <em>bucketLength=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#BucketedRandomProjectionLSH"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.BucketedRandomProjectionLSH" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -2195,7 +2195,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.Bucketizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Bucketizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Bucketizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Bucketizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Bucketizer</code><span class="sig-paren">(</span><em>splits=None</em>, <em>inputCol=None</em>, <em>outputCol=None</em>, <em>handleInvalid='error'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Bucketizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Bucketizer" title="Permalink to this definition">¶</a></dt>
 <dd><p>Maps a column of continuous features to a column of feature buckets.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">values</span> <span class="o">=</span> <span class="p">[(</span><span class="mf">0.1</span><span class="p">,),</span> <span class="p">(</span><span class="mf">0.4</span><span class="p">,),</span> <span class="p">(</span><span class="mf">1.2</span><span class="p">,),</span> <span class="p">(</span><span class="mf">1.5</span><span class="p">,),</span> <span class="p">(</span><span class="nb">float</span><span class="p">(</span><span class="s2">&quot;nan&quot;</span><span class="p">),),</span> <span class="p">(</span><span class="nb">float</span><span class="p">(</span><span class="s2">&quot;nan&quot;</span><span class="p">),)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">values</span><span class="p">,</span> <span class="p">[</span><span class="s2">&quot;values&quot;</span><span class="p">])</span>
@@ -2469,7 +2469,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.ChiSqSelector">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">ChiSqSelector</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#ChiSqSelector"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.ChiSqSelector" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">ChiSqSelector</code><span class="sig-paren">(</span><em>numTopFeatures=50</em>, <em>featuresCol='features'</em>, <em>outputCol=None</em>, <em>labelCol='label'</em>, <em>selectorType='numTopFeatures'</em>, <em>percentile=0.1</em>, <em>fpr=0.05</em>, <em>fdr=0.05</em>, <em>fwe=0.05</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#ChiSqSelector"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.ChiSqSelector" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -3095,7 +3095,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.CountVectorizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">CountVectorizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#CountVectorizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.CountVectorizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">CountVectorizer</code><span class="sig-paren">(</span><em>minTF=1.0</em>, <em>minDF=1.0</em>, <em>vocabSize=262144</em>, <em>binary=False</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#CountVectorizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.CountVectorizer" title="Permalink to this definition">¶</a></dt>
 <dd><p>Extracts a vocabulary from document collections and generates a <a class="reference internal" href="#pyspark.ml.feature.CountVectorizerModel" title="pyspark.ml.feature.CountVectorizerModel"><code class="xref py py-attr docutils literal"><span class="pre">CountVectorizerModel</span></code></a>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span>
 <span class="gp">... </span>   <span class="p">[(</span><span class="mi">0</span><span class="p">,</span> <span class="p">[</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;c&quot;</span><span class="p">]),</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="p">[</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;c&quot;</span><span class="p">,</span> <span class="s2">&quot;a&quot;</span><span class="p">])],</span>
@@ -3634,7 +3634,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.DCT">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">DCT</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#DCT"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.DCT" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">DCT</code><span class="sig-paren">(</span><em>inverse=False</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#DCT"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.DCT" title="Permalink to this definition">¶</a></dt>
 <dd><p>A feature transformer that takes the 1D discrete cosine transform
 of a real vector. No zero padding is performed on the input vector.
 It returns a real vector of the same length representing the DCT.
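
To make the DCT description above concrete, a short sketch applying the transformer to a single dense vector (toy values; `spark` assumed):

    from pyspark.ml.feature import DCT
    from pyspark.ml.linalg import Vectors

    df = spark.createDataFrame([(Vectors.dense([5.0, 8.0, 6.0]),)], ["vec"])
    # inverse=False computes the forward 1D discrete cosine transform.
    dct = DCT(inverse=False, inputCol="vec", outputCol="resultVec")
    dct.transform(df).show(truncate=False)
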
@@ -3888,7 +3888,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.ElementwiseProduct">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">ElementwiseProduct</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#ElementwiseProduct"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.ElementwiseProduct" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">ElementwiseProduct</code><span class="sig-paren">(</span><em>scalingVec=None</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#ElementwiseProduct"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.ElementwiseProduct" title="Permalink to this definition">¶</a></dt>
 <dd><p>Outputs the Hadamard product (i.e., the element-wise product) of each input vector
 with a provided “weight” vector. In other words, it scales each column of the dataset
 by a scalar multiplier.</p>
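
A brief sketch of the Hadamard-product behaviour described above, with an invented scaling vector and an assumed SparkSession `spark`:

    from pyspark.ml.feature import ElementwiseProduct
    from pyspark.ml.linalg import Vectors

    df = spark.createDataFrame([(Vectors.dense([2.0, 1.0, 3.0]),)], ["values"])
    ep = ElementwiseProduct(scalingVec=Vectors.dense([1.0, 2.0, 3.0]),
                            inputCol="values", outputCol="eprod")
    ep.transform(df).show(truncate=False)   # element-wise: [2.0, 2.0, 9.0]
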
@@ -4135,7 +4135,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.FeatureHasher">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">FeatureHasher</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#FeatureHasher"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.FeatureHasher" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">FeatureHasher</code><span class="sig-paren">(</span><em>numFeatures=262144</em>, <em>inputCols=None</em>, <em>outputCol=None</em>, <em>categoricalCols=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#FeatureHasher"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.FeatureHasher" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -4437,7 +4437,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.HashingTF">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">HashingTF</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#HashingTF"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.HashingTF" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">HashingTF</code><span class="sig-paren">(</span><em>numFeatures=262144</em>, <em>binary=False</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#HashingTF"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.HashingTF" title="Permalink to this definition">¶</a></dt>
 <dd><p>Maps a sequence of terms to their term frequencies using the hashing trick.
 Currently we use Austin Appleby’s MurmurHash 3 algorithm (MurmurHash3_x86_32)
 to calculate the hash code value for the term object.
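
A small sketch of the hashing-trick behaviour described above, using a small hash space (toy data; `spark` assumed):

    from pyspark.ml.feature import HashingTF

    df = spark.createDataFrame([(["a", "b", "c"],)], ["words"])
    # numFeatures sets the hash-space size; smaller values make collisions more likely.
    tf = HashingTF(numFeatures=16, inputCol="words", outputCol="tf")
    tf.transform(df).show(truncate=False)
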
@@ -4705,7 +4705,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.IDF">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">IDF</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#IDF"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.IDF" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">IDF</code><span class="sig-paren">(</span><em>minDocFreq=0</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#IDF"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.IDF" title="Permalink to this definition">¶</a></dt>
 <dd><p>Compute the Inverse Document Frequency (IDF) given a collection of documents.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.ml.linalg</span> <span class="k">import</span> <span class="n">DenseVector</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="n">DenseVector</span><span class="p">([</span><span class="mf">1.0</span><span class="p">,</span> <span class="mf">2.0</span><span class="p">]),),</span>
@@ -5170,7 +5170,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.Imputer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Imputer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Imputer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Imputer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Imputer</code><span class="sig-paren">(</span><em>strategy='mean'</em>, <em>missingValue=nan</em>, <em>inputCols=None</em>, <em>outputCols=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Imputer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Imputer" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -5693,7 +5693,7 @@ which are used to replace the missing values in the input DataFrame.</p>
 
 <dl class="class">
 <dt id="pyspark.ml.feature.IndexToString">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">IndexToString</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#IndexToString"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.IndexToString" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">IndexToString</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>labels=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#IndexToString"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.IndexToString" title="Permalink to this definition">¶</a></dt>
 <dd><p>A <code class="xref py py-class docutils literal"><span class="pre">Transformer</span></code> that maps a column of indices back to a new column of
 corresponding string values.
 The index-string mapping is either from the ML attributes of the input column,
@@ -5927,7 +5927,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.MaxAbsScaler">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MaxAbsScaler</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MaxAbsScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MaxAbsScaler" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MaxAbsScaler</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MaxAbsScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MaxAbsScaler" title="Permalink to this definition">¶</a></dt>
 <dd><p>Rescale each feature individually to range [-1, 1] by dividing through the largest maximum
 absolute value in each feature. It does not shift/center the data, and thus does not destroy
 any sparsity.</p>
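A minimal sketch of the rescaling described above, with illustrative data and an ad-hoc local SparkSession:

from pyspark.sql import SparkSession
from pyspark.ml.linalg import Vectors
from pyspark.ml.feature import MaxAbsScaler

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([(Vectors.dense([1.0, -8.0]),),
                            (Vectors.dense([2.0, 4.0]),)], ["features"])

# fit() finds the maximum absolute value of each feature; transform() divides each feature by it.
scaler = MaxAbsScaler(inputCol="features", outputCol="scaled")
scaler.fit(df).transform(df).show(truncate=False)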
@@ -6371,7 +6371,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.MinHashLSH">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MinHashLSH</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MinHashLSH"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MinHashLSH" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MinHashLSH</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>seed=None</em>, <em>numHashTables=1</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MinHashLSH"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MinHashLSH" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -6937,7 +6937,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.MinMaxScaler">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MinMaxScaler</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MinMaxScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MinMaxScaler" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">MinMaxScaler</code><span class="sig-paren">(</span><em>min=0.0</em>, <em>max=1.0</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#MinMaxScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.MinMaxScaler" title="Permalink to this definition">¶</a></dt>
 <dd><p>Rescale each feature individually to a common range [min, max] linearly using column summary
 statistics, which is also known as min-max normalization or Rescaling. The rescaled value for
 feature E is calculated as,</p>
@@ -7449,7 +7449,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.NGram">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">NGram</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#NGram"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.NGram" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">NGram</code><span class="sig-paren">(</span><em>n=2</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#NGram"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.NGram" title="Permalink to this definition">¶</a></dt>
 <dd><p>A feature transformer that converts the input array of strings into an array of n-grams. Null
 values in the input array are ignored.
 It returns an array of n-grams where each n-gram is represented by a space-separated string of
@@ -7460,15 +7460,15 @@ returned.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">inputTokens</span><span class="o">=</span><span class="p">[</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;c&quot;</span><span class="p">,</span> <span class="s2">&quot;d&quot;</span><span class="p">,</span> <span class="s2">&quot;e&quot;</span><span class="p">])])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span> <span class="o">=</span> <span class="n">NGram</span><span class="p">(</span><span class="n">n</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">inputCol</span><span class="o">=</span><span class="s2">&quot;inputTokens&quot;</span><span class="p">,</span> <span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;nGrams&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(inputTokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;, u&#39;d&#39;, u&#39;e&#39;], nGrams=[u&#39;a b&#39;, u&#39;b c&#39;, u&#39;c d&#39;, u&#39;d e&#39;])</span>
+<span class="go">Row(inputTokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;, &#39;d&#39;, &#39;e&#39;], nGrams=[&#39;a b&#39;, &#39;b c&#39;, &#39;c d&#39;, &#39;d e&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Change n-gram length</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="n">n</span><span class="o">=</span><span class="mi">4</span><span class="p">)</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(inputTokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;, u&#39;d&#39;, u&#39;e&#39;], nGrams=[u&#39;a b c d&#39;, u&#39;b c d e&#39;])</span>
+<span class="go">Row(inputTokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;, &#39;d&#39;, &#39;e&#39;], nGrams=[&#39;a b c d&#39;, &#39;b c d e&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Temporarily modify output column.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="p">{</span><span class="n">ngram</span><span class="o">.</span><span class="n">outputCol</span><span class="p">:</span> <span class="s2">&quot;output&quot;</span><span class="p">})</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(inputTokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;, u&#39;d&#39;, u&#39;e&#39;], output=[u&#39;a b c d&#39;, u&#39;b c d e&#39;])</span>
+<span class="go">Row(inputTokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;, &#39;d&#39;, &#39;e&#39;], output=[&#39;a b c d&#39;, &#39;b c d e&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(inputTokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;, u&#39;d&#39;, u&#39;e&#39;], nGrams=[u&#39;a b c d&#39;, u&#39;b c d e&#39;])</span>
+<span class="go">Row(inputTokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;, &#39;d&#39;, &#39;e&#39;], nGrams=[&#39;a b c d&#39;, &#39;b c d e&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Must use keyword arguments to specify params.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ngram</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="s2">&quot;text&quot;</span><span class="p">)</span>
 <span class="gt">Traceback (most recent call last):</span>
@@ -7709,7 +7709,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.Normalizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Normalizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Normalizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Normalizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Normalizer</code><span class="sig-paren">(</span><em>p=2.0</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Normalizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Normalizer" title="Permalink to this definition">¶</a></dt>
 <dd><blockquote>
 <div>Normalize a vector to have unit norm using the given p-norm.</div></blockquote>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.ml.linalg</span> <span class="k">import</span> <span class="n">Vectors</span>
@@ -7958,7 +7958,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.OneHotEncoder">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">OneHotEncoder</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#OneHotEncoder"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.OneHotEncoder" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">OneHotEncoder</code><span class="sig-paren">(</span><em>dropLast=True</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#OneHotEncoder"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.OneHotEncoder" title="Permalink to this definition">¶</a></dt>
 <dd><p>A one-hot encoder that maps a column of category indices to a
 column of binary vectors, with at most a single one-value per row
 that indicates the input category index.
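A short sketch of the encoder described above, paired with a StringIndexer as is common; the category values and column names are illustrative assumptions:

from pyspark.sql import SparkSession
from pyspark.ml.feature import StringIndexer, OneHotEncoder

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([("a",), ("b",), ("a",), ("c",)], ["category"])

# Index the strings first, then encode each index as a vector of length numCategories - 1
# (the last category is dropped because dropLast=True).
indexed = StringIndexer(inputCol="category", outputCol="categoryIndex").fit(df).transform(df)
encoder = OneHotEncoder(dropLast=True, inputCol="categoryIndex", outputCol="categoryVec")
encoder.transform(indexed).show()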
@@ -8229,7 +8229,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.OneHotEncoderEstimator">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">OneHotEncoderEstimator</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#OneHotEncoderEstimator"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.OneHotEncoderEstimator" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">OneHotEncoderEstimator</code><span class="sig-paren">(</span><em>inputCols=None</em>, <em>outputCols=None</em>, <em>handleInvalid='error'</em>, <em>dropLast=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#OneHotEncoderEstimator"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.OneHotEncoderEstimator" title="Permalink to this definition">¶</a></dt>
 <dd><p>A one-hot encoder that maps a column of category indices to a column of binary vectors, with
 at most a single one-value per row that indicates the input category index.
 For example with 5 categories, an input value of 2.0 would map to an output vector of
@@ -8719,7 +8719,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.PCA">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">PCA</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#PCA"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.PCA" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">PCA</code><span class="sig-paren">(</span><em>k=None</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#PCA"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.PCA" title="Permalink to this definition">¶</a></dt>
 <dd><p>PCA trains a model to project vectors to a lower dimensional space of the
 top <a class="reference internal" href="#pyspark.ml.feature.PCA.k" title="pyspark.ml.feature.PCA.k"><code class="xref py py-attr docutils literal"><span class="pre">k</span></code></a> principal components.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.ml.linalg</span> <span class="k">import</span> <span class="n">Vectors</span>
@@ -9195,7 +9195,7 @@ Each column is one principal component.</p>
 
 <dl class="class">
 <dt id="pyspark.ml.feature.PolynomialExpansion">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">PolynomialExpansion</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#PolynomialExpansion"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.PolynomialExpansion" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">PolynomialExpansion</code><span class="sig-paren">(</span><em>degree=2</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#PolynomialExpansion"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.PolynomialExpansion" title="Permalink to this definition">¶</a></dt>
 <dd><p>Perform feature expansion in a polynomial space. As said in <a class="reference external" href="http://en.wikipedia.org/wiki/Polynomial_expansion">wikipedia of Polynomial Expansion</a>, “In mathematics, an
 expansion of a product of sums expresses it as a sum of products by using the fact that
 multiplication distributes over addition”. Take a 2-variable feature vector as an example:
@@ -9442,7 +9442,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.QuantileDiscretizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">QuantileDiscretizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#QuantileDiscretizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.QuantileDiscretizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">QuantileDiscretizer</code><span class="sig-paren">(</span><em>numBuckets=2</em>, <em>inputCol=None</em>, <em>outputCol=None</em>, <em>relativeError=0.001</em>, <em>handleInvalid='error'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#QuantileDiscretizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.QuantileDiscretizer" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -9792,7 +9792,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.RegexTokenizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">RegexTokenizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#RegexTokenizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.RegexTokenizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">RegexTokenizer</code><span class="sig-paren">(</span><em>minTokenLength=1</em>, <em>gaps=True</em>, <em>pattern='\s+'</em>, <em>inputCol=None</em>, <em>outputCol=None</em>, <em>toLowercase=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#RegexTokenizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.RegexTokenizer" title="Permalink to this definition">¶</a></dt>
 <dd><p>A regex based tokenizer that extracts tokens either by using the
 provided regex pattern (in Java dialect) to split the text
 (default) or repeatedly matching the regex (if gaps is false).
@@ -9802,15 +9802,15 @@ It returns an array of strings that can be empty.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="s2">&quot;A B  c&quot;</span><span class="p">,)],</span> <span class="p">[</span><span class="s2">&quot;text&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span> <span class="o">=</span> <span class="n">RegexTokenizer</span><span class="p">(</span><span class="n">inputCol</span><span class="o">=</span><span class="s2">&quot;text&quot;</span><span class="p">,</span> <span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;words&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;A B  c&#39;, words=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;A B  c&#39;, words=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Change a parameter.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;tokens&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;A B  c&#39;, tokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;A B  c&#39;, tokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Temporarily modify a parameter.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="p">{</span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">outputCol</span><span class="p">:</span> <span class="s2">&quot;words&quot;</span><span class="p">})</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;A B  c&#39;, words=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;A B  c&#39;, words=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;A B  c&#39;, tokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;A B  c&#39;, tokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Must use keyword arguments to specify params.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">reTokenizer</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="s2">&quot;text&quot;</span><span class="p">)</span>
 <span class="gt">Traceback (most recent call last):</span>
@@ -10122,7 +10122,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.RFormula">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">RFormula</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#RFormula"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.RFormula" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">RFormula</code><span class="sig-paren">(</span><em>formula=None</em>, <em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>forceIndexLabel=False</em>, <em>stringIndexerOrderType='frequencyDesc'</em>, <em>handleInvalid='error'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#RFormula"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.RFormula" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -10682,7 +10682,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.SQLTransformer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">SQLTransformer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#SQLTransformer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.SQLTransformer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">SQLTransformer</code><span class="sig-paren">(</span><em>statement=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#SQLTransformer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.SQLTransformer" title="Permalink to this definition">¶</a></dt>
 <dd><p>Implements the transforms which are defined by SQL statement.
 Currently we only support SQL syntax like ‘SELECT … FROM __THIS__’
 where ‘__THIS__’ represents the underlying table of the input dataset.</p>
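A minimal sketch of the statement-based transform described above, with an illustrative DataFrame and a locally created SparkSession:

from pyspark.sql import SparkSession
from pyspark.ml.feature import SQLTransformer

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([(0, 1.0, 3.0), (2, 2.0, 5.0)], ["id", "v1", "v2"])

# __THIS__ stands for the input DataFrame, exposed to the statement as a temporary table.
sqlTrans = SQLTransformer(statement="SELECT *, (v1 + v2) AS v3, (v1 * v2) AS v4 FROM __THIS__")
sqlTrans.transform(df).show()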
@@ -10892,7 +10892,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.StandardScaler">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StandardScaler</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StandardScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StandardScaler" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StandardScaler</code><span class="sig-paren">(</span><em>withMean=False</em>, <em>withStd=True</em>, <em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StandardScaler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StandardScaler" title="Permalink to this definition">¶</a></dt>
 <dd><p>Standardizes features by removing the mean and scaling to unit variance using column summary
 statistics on the samples in the training set.</p>
 <p>The “unit std” is computed using the <a class="reference external" href="https://en.wikipedia.org/wiki/Standard_deviation#Corrected_sample_standard_deviation">corrected sample standard deviation</a>,
@@ -11392,7 +11392,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.StopWordsRemover">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StopWordsRemover</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StopWordsRemover"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StopWordsRemover" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StopWordsRemover</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>stopWords=None</em>, <em>caseSensitive=False</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StopWordsRemover"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StopWordsRemover" title="Permalink to this definition">¶</a></dt>
 <dd><p>A feature transformer that filters out stop words from input.</p>
 <div class="admonition note">
 <p class="first admonition-title">Note</p>
@@ -11673,7 +11673,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.StringIndexer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StringIndexer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StringIndexer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StringIndexer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">StringIndexer</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>handleInvalid='error'</em>, <em>stringOrderType='frequencyDesc'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#StringIndexer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.StringIndexer" title="Permalink to this definition">¶</a></dt>
 <dd><p>A label indexer that maps a string column of labels to an ML column of label indices.
 If the input column is numeric, we cast it to string and index the string values.
 The indices are in [0, numLabels). By default, this is ordered by label frequencies
@@ -12171,21 +12171,21 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.Tokenizer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Tokenizer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Tokenizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Tokenizer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Tokenizer</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Tokenizer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Tokenizer" title="Permalink to this definition">¶</a></dt>
 <dd><p>A tokenizer that converts the input string to lowercase and then
 splits it by white spaces.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="s2">&quot;a b c&quot;</span><span class="p">,)],</span> <span class="p">[</span><span class="s2">&quot;text&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span> <span class="o">=</span> <span class="n">Tokenizer</span><span class="p">(</span><span class="n">inputCol</span><span class="o">=</span><span class="s2">&quot;text&quot;</span><span class="p">,</span> <span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;words&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;a b c&#39;, words=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;a b c&#39;, words=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Change a parameter.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;tokens&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;a b c&#39;, tokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;a b c&#39;, tokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Temporarily modify a parameter.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="p">{</span><span class="n">tokenizer</span><span class="o">.</span><span class="n">outputCol</span><span class="p">:</span> <span class="s2">&quot;words&quot;</span><span class="p">})</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;a b c&#39;, words=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;a b c&#39;, words=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">df</span><span class="p">)</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(text=u&#39;a b c&#39;, tokens=[u&#39;a&#39;, u&#39;b&#39;, u&#39;c&#39;])</span>
+<span class="go">Row(text=&#39;a b c&#39;, tokens=[&#39;a&#39;, &#39;b&#39;, &#39;c&#39;])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="c1"># Must use keyword arguments to specify params.</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">tokenizer</span><span class="o">.</span><span class="n">setParams</span><span class="p">(</span><span class="s2">&quot;text&quot;</span><span class="p">)</span>
 <span class="gt">Traceback (most recent call last):</span>
@@ -12403,7 +12403,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.VectorAssembler">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorAssembler</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorAssembler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorAssembler" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorAssembler</code><span class="sig-paren">(</span><em>inputCols=None</em>, <em>outputCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorAssembler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorAssembler" title="Permalink to this definition">¶</a></dt>
 <dd><p>A feature transformer that merges multiple columns into a vector column.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">3</span><span class="p">)],</span> <span class="p">[</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;c&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">vecAssembler</span> <span class="o">=</span> <span class="n">VectorAssembler</span><span class="p">(</span><span class="n">inputCols</span><span class="o">=</span><span class="p">[</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">,</span> <span class="s2">&quot;c&quot;</span><span class="p">],</span> <span class="n">outputCol</span><span class="o">=</span><span class="s2">&quot;features&quot;</span><span class="p">)</span>
@@ -12626,7 +12626,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.VectorIndexer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorIndexer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorIndexer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorIndexer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorIndexer</code><span class="sig-paren">(</span><em>maxCategories=20</em>, <em>inputCol=None</em>, <em>outputCol=None</em>, <em>handleInvalid='error'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorIndexer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorIndexer" title="Permalink to this definition">¶</a></dt>
 <dd><p>Class for indexing categorical feature columns in a dataset of <cite>Vector</cite>.</p>
 <dl class="docutils">
 <dt>This has 2 usage modes:</dt>
@@ -13202,7 +13202,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.VectorSizeHint">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorSizeHint</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSizeHint"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSizeHint" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorSizeHint</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>size=None</em>, <em>handleInvalid='error'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSizeHint"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSizeHint" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -13464,7 +13464,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.feature.VectorSlicer">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorSlicer</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSlicer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSlicer" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">VectorSlicer</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>indices=None</em>, <em>names=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSlicer"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSlicer" title="Permalink to this definition">¶</a></dt>
 <dd><p>This class takes a feature vector and outputs a new feature vector with a subarray
 of the original features.</p>
 <p>The subset of features can be specified with either indices (<cite>setIndices()</cite>)
@@ -13699,7 +13699,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="method">
 <dt id="pyspark.ml.feature.VectorSlicer.setParams">
-<code class="descname">setParams</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSlicer.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSlicer.setParams" title="Permalink to this definition">¶</a></dt>
+<code class="descname">setParams</code><span class="sig-paren">(</span><em>inputCol=None</em>, <em>outputCol=None</em>, <em>indices=None</em>, <em>names=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#VectorSlicer.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.VectorSlicer.setParams" title="Permalink to this definition">¶</a></dt>
 <dd><p>setParams(self, inputCol=None, outputCol=None, indices=None, names=None):
 Sets params for this VectorSlicer.</p>
 <div class="versionadded">
@@ -13741,7 +13741,7 @@ Sets params for this VectorSlicer.</p>
 
 <dl class="class">
 <dt id="pyspark.ml.feature.Word2Vec">
-<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Word2Vec</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Word2Vec"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Word2Vec" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.feature.</code><code class="descname">Word2Vec</code><span class="sig-paren">(</span><em>vectorSize=100</em>, <em>minCount=5</em>, <em>numPartitions=1</em>, <em>stepSize=0.025</em>, <em>maxIter=1</em>, <em>seed=None</em>, <em>inputCol=None</em>, <em>outputCol=None</em>, <em>windowSize=5</em>, <em>maxSentenceLength=1000</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/feature.html#Word2Vec"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.feature.Word2Vec" title="Permalink to this definition">¶</a></dt>
 <dd><p>Word2Vec trains a model of <cite>Map(String, Vector)</cite>, i.e. transforms a word into a code for further
 natural language processing or machine learning process.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sent</span> <span class="o">=</span> <span class="p">(</span><span class="s2">&quot;a b &quot;</span> <span class="o">*</span> <span class="mi">100</span> <span class="o">+</span> <span class="s2">&quot;a c &quot;</span> <span class="o">*</span> <span class="mi">10</span><span class="p">)</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s2">&quot; &quot;</span><span class="p">)</span>
@@ -13758,7 +13758,7 @@ natural language processing or machine learning process.</p>
 <span class="go">+----+--------------------+</span>
 <span class="gp">...</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model</span><span class="o">.</span><span class="n">findSynonymsArray</span><span class="p">(</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
-<span class="go">[(u&#39;b&#39;, 0.25053444504737854), (u&#39;c&#39;, -0.6980510950088501)]</span>
+<span class="go">[(&#39;b&#39;, 0.25053444504737854), (&#39;c&#39;, -0.6980510950088501)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.functions</span> <span class="k">import</span> <span class="n">format_number</span> <span class="k">as</span> <span class="n">fmt</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model</span><span class="o">.</span><span class="n">findSynonyms</span><span class="p">(</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;word&quot;</span><span class="p">,</span> <span class="n">fmt</span><span class="p">(</span><span class="s2">&quot;similarity&quot;</span><span class="p">,</span> <span class="mi">5</span><span class="p">)</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s2">&quot;similarity&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">show</span><span class="p">()</span>
 <span class="go">+----+----------+</span>
@@ -14396,7 +14396,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 <span id="pyspark-ml-classification-module"></span><h2>pyspark.ml.classification module<a class="headerlink" href="#module-pyspark.ml.classification" title="Permalink to this headline">¶</a></h2>
 <dl class="class">
 <dt id="pyspark.ml.classification.LinearSVC">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">LinearSVC</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LinearSVC"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LinearSVC" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">LinearSVC</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>regParam=0.0</em>, <em>tol=1e-06</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>fitIntercept=True</em>, <em>standardization=True</em>, <em>threshold=0.0</em>, <em>weightCol=None</em>, <em>aggregationDepth=2</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LinearSVC"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LinearSVC" title="Permalink to this definition">¶</a></dt>
 <dd><div class="admonition note">
 <p class="first admonition-title">Note</p>
 <p class="last">Experimental</p>
@@ -14771,7 +14771,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="method">
 <dt id="pyspark.ml.classification.LinearSVC.setParams">
-<code class="descname">setParams</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LinearSVC.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LinearSVC.setParams" title="Permalink to this definition">¶</a></dt>
+<code class="descname">setParams</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>regParam=0.0</em>, <em>tol=1e-06</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>fitIntercept=True</em>, <em>standardization=True</em>, <em>threshold=0.0</em>, <em>weightCol=None</em>, <em>aggregationDepth=2</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LinearSVC.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LinearSVC.setParams" title="Permalink to this definition">¶</a></dt>
 <dd><p>setParams(self, featuresCol=”features”, labelCol=”label”, predictionCol=”prediction”,                   maxIter=100, regParam=0.0, tol=1e-6, rawPredictionCol=”rawPrediction”,                   fitIntercept=True, standardization=True, threshold=0.0, weightCol=None,                   aggregationDepth=2):
 Sets params for Linear SVM Classifier.</p>
 <div class="versionadded">
@@ -15055,7 +15055,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.LogisticRegression">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">LogisticRegression</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LogisticRegression"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LogisticRegression" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">LogisticRegression</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>regParam=0.0</em>, <em>elasticNetParam=0.0</em>, <em>tol=1e-06</em>, <em>fitIntercept=True</em>, <em>threshold=0.5</em>, <em>thresholds=None</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>standardization=True</em>, <em>weightCol=None</em>, <em>aggregationDepth=2</em>, <em>family='auto'</em>, <em>lowerBoundsOnCoefficients=None</em>, <em>upperBoundsOnCoefficients=None</em>, <em>lowerBoundsOnIntercepts=None</em>, <em>upperBoundsOnIntercepts=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LogisticRegression"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#p
 yspark.ml.classification.LogisticRegression" title="Permalink to this definition">¶</a></dt>
 <dd><p>Logistic regression.
 This class supports multinomial logistic (softmax) and binomial logistic regression.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
@@ -15574,7 +15574,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="method">
 <dt id="pyspark.ml.classification.LogisticRegression.setParams">
-<code class="descname">setParams</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LogisticRegression.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LogisticRegression.setParams" title="Permalink to this definition">¶</a></dt>
+<code class="descname">setParams</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>regParam=0.0</em>, <em>elasticNetParam=0.0</em>, <em>tol=1e-06</em>, <em>fitIntercept=True</em>, <em>threshold=0.5</em>, <em>thresholds=None</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>standardization=True</em>, <em>weightCol=None</em>, <em>aggregationDepth=2</em>, <em>family='auto'</em>, <em>lowerBoundsOnCoefficients=None</em>, <em>upperBoundsOnCoefficients=None</em>, <em>lowerBoundsOnIntercepts=None</em>, <em>upperBoundsOnIntercepts=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#LogisticRegression.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.LogisticRegression.setParams" title="Permalink to this definition">
 ¶</a></dt>
 <dd><p>setParams(self, featuresCol=”features”, labelCol=”label”, predictionCol=”prediction”,                   maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,                   threshold=0.5, thresholds=None, probabilityCol=”probability”,                   rawPredictionCol=”rawPrediction”, standardization=True, weightCol=None,                   aggregationDepth=2, family=”auto”,                   lowerBoundsOnCoefficients=None, upperBoundsOnCoefficients=None,                   lowerBoundsOnIntercepts=None, upperBoundsOnIntercepts=None):
 Sets params for logistic regression.
 If the threshold and thresholds Params are both set, they must be equivalent.</p>
@@ -16926,7 +16926,7 @@ versions.</p>
 
 <dl class="class">
 <dt id="pyspark.ml.classification.DecisionTreeClassifier">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">DecisionTreeClassifier</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#DecisionTreeClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.DecisionTreeClassifier" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">DecisionTreeClassifier</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>maxDepth=5</em>, <em>maxBins=32</em>, <em>minInstancesPerNode=1</em>, <em>minInfoGain=0.0</em>, <em>maxMemoryInMB=256</em>, <em>cacheNodeIds=False</em>, <em>checkpointInterval=10</em>, <em>impurity='gini'</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#DecisionTreeClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.DecisionTreeClassifier" title="Permalink to this definition">¶</a></dt>
 <dd><p><a class="reference external" href="http://en.wikipedia.org/wiki/Decision_tree_learning">Decision tree</a>
 learning algorithm for classification.
 It supports both binary and multiclass labels, as well as both continuous and categorical
@@ -17670,7 +17670,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.GBTClassifier">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">GBTClassifier</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#GBTClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.GBTClassifier" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">GBTClassifier</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxDepth=5</em>, <em>maxBins=32</em>, <em>minInstancesPerNode=1</em>, <em>minInfoGain=0.0</em>, <em>maxMemoryInMB=256</em>, <em>cacheNodeIds=False</em>, <em>checkpointInterval=10</em>, <em>lossType='logistic'</em>, <em>maxIter=20</em>, <em>stepSize=0.1</em>, <em>seed=None</em>, <em>subsamplingRate=1.0</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#GBTClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.GBTClassifier" title="Permalink to this definition">¶</a></dt>
 <dd><p><a class="reference external" href="http://en.wikipedia.org/wiki/Gradient_boosting">Gradient-Boosted Trees (GBTs)</a>
 learning algorithm for classification.
 It supports binary labels, as well as both continuous and categorical features.</p>
@@ -18441,7 +18441,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.RandomForestClassifier">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">RandomForestClassifier</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#RandomForestClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.RandomForestClassifier" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">RandomForestClassifier</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>maxDepth=5</em>, <em>maxBins=32</em>, <em>minInstancesPerNode=1</em>, <em>minInfoGain=0.0</em>, <em>maxMemoryInMB=256</em>, <em>cacheNodeIds=False</em>, <em>checkpointInterval=10</em>, <em>impurity='gini'</em>, <em>numTrees=20</em>, <em>featureSubsetStrategy='auto'</em>, <em>seed=None</em>, <em>subsamplingRate=1.0</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#RandomForestClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.RandomForestClassifier" title="Permalink to this definition">¶</a></dt>
 <dd><p><a class="reference external" href="http://en.wikipedia.org/wiki/Random_forest">Random Forest</a>
 learning algorithm for classification.
 It supports both binary and multiclass labels, as well as both continuous and categorical
@@ -19261,7 +19261,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.NaiveBayes">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">NaiveBayes</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#NaiveBayes"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.NaiveBayes" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">NaiveBayes</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em>, <em>smoothing=1.0</em>, <em>modelType='multinomial'</em>, <em>thresholds=None</em>, <em>weightCol=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#NaiveBayes"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.NaiveBayes" title="Permalink to this definition">¶</a></dt>
 <dd><p>Naive Bayes Classifiers.
 It supports both Multinomial and Bernoulli NB. <a class="reference external" href="http://nlp.stanford.edu/IR-book/html/htmledition/naive-bayes-text-classification-1.html">Multinomial NB</a>
 can handle finitely supported discrete data. For example, by converting documents into
@@ -19882,7 +19882,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.MultilayerPerceptronClassifier">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">MultilayerPerceptronClassifier</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#MultilayerPerceptronClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.MultilayerPerceptronClassifier" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">MultilayerPerceptronClassifier</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>tol=1e-06</em>, <em>seed=None</em>, <em>layers=None</em>, <em>blockSize=128</em>, <em>stepSize=0.03</em>, <em>solver='l-bfgs'</em>, <em>initialWeights=None</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#MultilayerPerceptronClassifier"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.MultilayerPerceptronClassifier" title="Permalink to this definition">¶</a></dt>
 <dd><p>Classifier trainer based on the Multilayer Perceptron.
 Each layer has sigmoid activation function, output layer has softmax.
 Number of inputs has to be equal to the size of feature vectors.
@@ -20305,7 +20305,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="method">
 <dt id="pyspark.ml.classification.MultilayerPerceptronClassifier.setParams">
-<code class="descname">setParams</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#MultilayerPerceptronClassifier.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.MultilayerPerceptronClassifier.setParams" title="Permalink to this definition">¶</a></dt>
+<code class="descname">setParams</code><span class="sig-paren">(</span><em>featuresCol='features'</em>, <em>labelCol='label'</em>, <em>predictionCol='prediction'</em>, <em>maxIter=100</em>, <em>tol=1e-06</em>, <em>seed=None</em>, <em>layers=None</em>, <em>blockSize=128</em>, <em>stepSize=0.03</em>, <em>solver='l-bfgs'</em>, <em>initialWeights=None</em>, <em>probabilityCol='probability'</em>, <em>rawPredictionCol='rawPrediction'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#MultilayerPerceptronClassifier.setParams"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.MultilayerPerceptronClassifier.setParams" title="Permalink to this definition">¶</a></dt>
 <dd><p>setParams(self, featuresCol=”features”, labelCol=”label”, predictionCol=”prediction”,                   maxIter=100, tol=1e-6, seed=None, layers=None, blockSize=128, stepSize=0.03,                   solver=”l-bfgs”, initialWeights=None, probabilityCol=”probability”,                   rawPredictionCol=”rawPrediction”):
 Sets params for MultilayerPerceptronClassifier.</p>
 <div class="versionadded">
@@ -20583,7 +20583,7 @@ uses <code class="xref py py-func docutils literal"><span class="pre">dir()</spa
 
 <dl class="class">
 <dt id="pyspark.ml.classification.OneVsRest">
-<em class="property">class </em><code class="descclassname">pyspark.ml.classification.</code><code class="descname">OneVsRest</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/ml/classification.html#OneVsRest"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.ml.classification.OneVsRest" title="Permalink to this definition">¶</a></dt>
+<em class="property">class </em><code class

<TRUNCATED>
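
The hunks above restore the real keyword signatures (instead of *args, **kwargs) for the pyspark.ml classification estimators. As a minimal, illustrative sketch only, not taken from the commit itself, the following assumes Spark 2.3.x on Python 3 with a local SparkSession and shows those documented defaults being passed explicitly to LinearSVC and to LogisticRegression.setParams:

    from pyspark.sql import SparkSession
    from pyspark.ml.classification import LinearSVC, LogisticRegression
    from pyspark.ml.linalg import Vectors

    spark = SparkSession.builder.master("local[1]").getOrCreate()

    # Toy two-class training data: a label column and a dense feature vector.
    train = spark.createDataFrame(
        [(0.0, Vectors.dense(0.0, 1.0)),
         (1.0, Vectors.dense(2.0, 3.0))],
        ["label", "features"])

    # Keyword arguments mirror the defaults now visible in the rendered signature.
    svc = LinearSVC(maxIter=100, regParam=0.0, tol=1e-06, aggregationDepth=2)
    svc_model = svc.fit(train)

    # setParams accepts the same keywords as the constructor.
    lr = LogisticRegression()
    lr.setParams(maxIter=100, regParam=0.0, elasticNetParam=0.0,
                 family="auto", threshold=0.5)
    lr_model = lr.fit(train)
    print(lr_model.coefficients)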


[3/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/pyspark.sql.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/pyspark.sql.html b/site/docs/2.3.1/api/python/pyspark.sql.html
index 43c51be..6716867 100644
--- a/site/docs/2.3.1/api/python/pyspark.sql.html
+++ b/site/docs/2.3.1/api/python/pyspark.sql.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql module &#8212; PySpark master documentation</title>
+    <title>pyspark.sql module &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -39,7 +39,7 @@
           <a href="pyspark.html" title="pyspark package"
              accesskey="P">previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" accesskey="U">pyspark package</a> &#187;</li> 
       </ul>
@@ -292,22 +292,22 @@ omit the <code class="docutils literal"><span class="pre">struct&lt;&gt;</span><
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">l</span> <span class="o">=</span> <span class="p">[(</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">l</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(_1=u&#39;Alice&#39;, _2=1)]</span>
+<span class="go">[Row(_1=&#39;Alice&#39;, _2=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">l</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">])</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">d</span> <span class="o">=</span> <span class="p">[{</span><span class="s1">&#39;name&#39;</span><span class="p">:</span> <span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">}]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">d</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=1, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=1, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">l</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(_1=u&#39;Alice&#39;, _2=1)]</span>
+<span class="go">[Row(_1=&#39;Alice&#39;, _2=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
@@ -315,7 +315,7 @@ omit the <code class="docutils literal"><span class="pre">struct&lt;&gt;</span><
 <span class="gp">&gt;&gt;&gt; </span><span class="n">person</span> <span class="o">=</span> <span class="n">rdd</span><span class="o">.</span><span class="n">map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">r</span><span class="p">:</span> <span class="n">Person</span><span class="p">(</span><span class="o">*</span><span class="n">r</span><span class="p">))</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">person</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.types</span> <span class="k">import</span> <span class="o">*</span>
@@ -324,17 +324,17 @@ omit the <code class="docutils literal"><span class="pre">struct&lt;&gt;</span><
 <span class="gp">... </span>   <span class="n">StructField</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="n">IntegerType</span><span class="p">(),</span> <span class="kc">True</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df3</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="n">schema</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df3</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">toPandas</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>  
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">pandas</span><span class="o">.</span><span class="n">DataFrame</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">]]))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>  
 <span class="go">[Row(0=1, 1=2)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="s2">&quot;a: string, b: int&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(a=u&#39;Alice&#39;, b=1)]</span>
+<span class="go">[Row(a=&#39;Alice&#39;, b=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">rdd</span><span class="o">.</span><span class="n">map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">row</span><span class="p">:</span> <span class="n">row</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="s2">&quot;int&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[Row(value=1)]</span>
@@ -461,7 +461,7 @@ as a streaming <a class="reference internal" href="#pyspark.sql.DataFrame" title
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">createOrReplaceTempView</span><span class="p">(</span><span class="s2">&quot;table1&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="s2">&quot;SELECT field1 AS f1, field2 as f2 from table1&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(f1=1, f2=u&#39;row1&#39;), Row(f1=2, f2=u&#39;row2&#39;), Row(f1=3, f2=u&#39;row3&#39;)]</span>
+<span class="go">[Row(f1=1, f2=&#39;row1&#39;), Row(f1=2, f2=&#39;row2&#39;), Row(f1=3, f2=&#39;row3&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -642,22 +642,22 @@ If it’s not a <a class="reference internal" href="#pyspark.sql.types.StructTyp
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">l</span> <span class="o">=</span> <span class="p">[(</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">l</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(_1=u&#39;Alice&#39;, _2=1)]</span>
+<span class="go">[Row(_1=&#39;Alice&#39;, _2=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">l</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">])</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">d</span> <span class="o">=</span> <span class="p">[{</span><span class="s1">&#39;name&#39;</span><span class="p">:</span> <span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">}]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">d</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=1, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=1, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">l</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(_1=u&#39;Alice&#39;, _2=1)]</span>
+<span class="go">[Row(_1=&#39;Alice&#39;, _2=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
@@ -665,7 +665,7 @@ If it’s not a <a class="reference internal" href="#pyspark.sql.types.StructTyp
 <span class="gp">&gt;&gt;&gt; </span><span class="n">person</span> <span class="o">=</span> <span class="n">rdd</span><span class="o">.</span><span class="n">map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">r</span><span class="p">:</span> <span class="n">Person</span><span class="p">(</span><span class="o">*</span><span class="n">r</span><span class="p">))</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span> <span class="o">=</span> <span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">person</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.types</span> <span class="k">import</span> <span class="o">*</span>
@@ -674,17 +674,17 @@ If it’s not a <a class="reference internal" href="#pyspark.sql.types.StructTyp
 <span class="gp">... </span>   <span class="n">StructField</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="n">IntegerType</span><span class="p">(),</span> <span class="kc">True</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df3</span> <span class="o">=</span> <span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="n">schema</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df3</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">toPandas</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>  
-<span class="go">[Row(name=u&#39;Alice&#39;, age=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">pandas</span><span class="o">.</span><span class="n">DataFrame</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">]]))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>  
 <span class="go">[Row(0=1, 1=2)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="s2">&quot;a: string, b: int&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(a=u&#39;Alice&#39;, b=1)]</span>
+<span class="go">[Row(a=&#39;Alice&#39;, b=1)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">rdd</span><span class="o">.</span><span class="n">map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">row</span><span class="p">:</span> <span class="n">row</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="s2">&quot;int&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[Row(value=1)]</span>
@@ -743,12 +743,12 @@ created external table.</p>
 defaultValue. If the key is not set and defaultValue is not set, return
 the system default value.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">getConf</span><span class="p">(</span><span class="s2">&quot;spark.sql.shuffle.partitions&quot;</span><span class="p">)</span>
-<span class="go">u&#39;200&#39;</span>
+<span class="go">&#39;200&#39;</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">getConf</span><span class="p">(</span><span class="s2">&quot;spark.sql.shuffle.partitions&quot;</span><span class="p">,</span> <span class="sa">u</span><span class="s2">&quot;10&quot;</span><span class="p">)</span>
-<span class="go">u&#39;10&#39;</span>
+<span class="go">&#39;10&#39;</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">setConf</span><span class="p">(</span><span class="s2">&quot;spark.sql.shuffle.partitions&quot;</span><span class="p">,</span> <span class="sa">u</span><span class="s2">&quot;50&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">getConf</span><span class="p">(</span><span class="s2">&quot;spark.sql.shuffle.partitions&quot;</span><span class="p">,</span> <span class="sa">u</span><span class="s2">&quot;10&quot;</span><span class="p">)</span>
-<span class="go">u&#39;50&#39;</span>
+<span class="go">&#39;50&#39;</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -931,7 +931,7 @@ See <a class="reference internal" href="#pyspark.sql.UDFRegistration.registerJav
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">registerDataFrameAsTable</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="s2">&quot;table1&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span> <span class="o">=</span> <span class="n">sqlContext</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="s2">&quot;SELECT field1 AS f1, field2 as f2 from table1&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(f1=1, f2=u&#39;row1&#39;), Row(f1=2, f2=u&#39;row2&#39;), Row(f1=3, f2=u&#39;row3&#39;)]</span>
+<span class="go">[Row(f1=1, f2=&#39;row1&#39;), Row(f1=2, f2=&#39;row2&#39;), Row(f1=3, f2=&#39;row3&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1022,7 +1022,7 @@ See <a class="reference internal" href="#pyspark.sql.UDFRegistration.registerJav
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sqlContext</span><span class="o">.</span><span class="n">registerDataFrameAsTable</span><span class="p">(</span><span class="n">df</span><span class="p">,</span> <span class="s2">&quot;table1&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span> <span class="o">=</span> <span class="n">sqlContext</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="s2">&quot;tableName = &#39;table1&#39;&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">first</span><span class="p">()</span>
-<span class="go">Row(database=u&#39;&#39;, tableName=u&#39;table1&#39;, isTemporary=True)</span>
+<span class="go">Row(database=&#39;&#39;, tableName=&#39;table1&#39;, isTemporary=True)</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1138,11 +1138,11 @@ object must match the specified type. In this case, this API works as if
 <cite>register(name, f, returnType=StringType())</cite>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">strlen</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">udf</span><span class="o">.</span><span class="n">register</span><span class="p">(</span><span class="s2">&quot;stringLengthString&quot;</span><span class="p">,</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="nb">len</span><span class="p">(</span><span class="n">x</span><span class="p">))</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="s2">&quot;SELECT stringLengthString(&#39;test&#39;)&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(stringLengthString(test)=u&#39;4&#39;)]</span>
+<span class="go">[Row(stringLengthString(test)=&#39;4&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="s2">&quot;SELECT &#39;foo&#39; AS text&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">strlen</span><span class="p">(</span><span class="s2">&quot;text&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(stringLengthString(text)=u&#39;3&#39;)]</span>
+<span class="go">[Row(stringLengthString(text)=&#39;3&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.types</span> <span class="k">import</span> <span class="n">IntegerType</span>
@@ -1268,7 +1268,7 @@ a <a class="reference internal" href="#pyspark.sql.types.DataType" title="pyspar
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([(</span><span class="mi">1</span><span class="p">,</span> <span class="s2">&quot;a&quot;</span><span class="p">),(</span><span class="mi">2</span><span class="p">,</span> <span class="s2">&quot;b&quot;</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="s2">&quot;a&quot;</span><span class="p">)],[</span><span class="s2">&quot;id&quot;</span><span class="p">,</span> <span class="s2">&quot;name&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">createOrReplaceTempView</span><span class="p">(</span><span class="s2">&quot;df&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">spark</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="s2">&quot;SELECT name, javaUDAF(id) as avg from df group by name&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;b&#39;, avg=102.0), Row(name=u&#39;a&#39;, avg=102.0)]</span>
+<span class="go">[Row(name=&#39;b&#39;, avg=102.0), Row(name=&#39;a&#39;, avg=102.0)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1331,7 +1331,7 @@ and can be created using various functions in <a class="reference internal" href
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df_as2</span> <span class="o">=</span> <span class="n">df</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s2">&quot;df_as2&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">joined_df</span> <span class="o">=</span> <span class="n">df_as1</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df_as2</span><span class="p">,</span> <span class="n">col</span><span class="p">(</span><span class="s2">&quot;df_as1.name&quot;</span><span class="p">)</span> <span class="o">==</span> <span class="n">col</span><span class="p">(</span><span class="s2">&quot;df_as2.name&quot;</span><span class="p">),</span> <span class="s1">&#39;inner&#39;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">joined_df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;df_as1.name&quot;</span><span class="p">,</span> <span class="s2">&quot;df_as2.name&quot;</span><span class="p">,</span> <span class="s2">&quot;df_as2.age&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Bob&#39;, name=u&#39;Bob&#39;, age=5), Row(name=u&#39;Alice&#39;, name=u&#39;Alice&#39;, age=2)]</span>
+<span class="go">[Row(name=&#39;Bob&#39;, name=&#39;Bob&#39;, age=5), Row(name=&#39;Alice&#39;, name=&#39;Alice&#39;, age=2)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1487,7 +1487,7 @@ as <a class="reference internal" href="#pyspark.sql.Column" title="pyspark.sql.C
 <code class="descname">collect</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/sql/dataframe.html#DataFrame.collect"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.sql.DataFrame.collect" title="Permalink to this definition">¶</a></dt>
 <dd><p>Returns all the records as a list of <a class="reference internal" href="#pyspark.sql.Row" title="pyspark.sql.Row"><code class="xref py py-class docutils literal"><span class="pre">Row</span></code></a>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1666,12 +1666,12 @@ catalog.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="s2">&quot;name&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df2</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;name&quot;</span><span class="p">,</span> <span class="s2">&quot;height&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Tom&#39;, height=80), Row(name=u&#39;Bob&#39;, height=85)]</span>
+<span class="go">[Row(name=&#39;Tom&#39;, height=80), Row(name=&#39;Bob&#39;, height=85)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">crossJoin</span><span class="p">(</span><span class="n">df2</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;height&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="s2">&quot;name&quot;</span><span class="p">,</span> <span class="s2">&quot;height&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;, height=80), Row(age=2, name=u&#39;Alice&#39;, height=85),</span>
-<span class="go"> Row(age=5, name=u&#39;Bob&#39;, height=80), Row(age=5, name=u&#39;Bob&#39;, height=85)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;, height=80), Row(age=2, name=&#39;Alice&#39;, height=85),</span>
+<span class="go"> Row(age=5, name=&#39;Bob&#39;, height=80), Row(age=5, name=&#39;Bob&#39;, height=85)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -1799,23 +1799,23 @@ This is a no-op if schema doesn’t contain the given column name(s).</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">drop</span><span class="p">(</span><span class="s1">&#39;age&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;), Row(name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;), Row(name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">drop</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;), Row(name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;), Row(name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">name</span> <span class="o">==</span> <span class="n">df2</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="s1">&#39;inner&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">drop</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, height=85, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=5, height=85, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">name</span> <span class="o">==</span> <span class="n">df2</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="s1">&#39;inner&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">drop</span><span class="p">(</span><span class="n">df2</span><span class="o">.</span><span class="n">name</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;, height=85)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;, height=85)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;inner&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">drop</span><span class="p">(</span><span class="s1">&#39;age&#39;</span><span class="p">,</span> <span class="s1">&#39;height&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2027,15 +2027,15 @@ or a string of SQL expression.</td>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span> <span class="o">&gt;</span> <span class="mi">3</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">where</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span> <span class="o">==</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="s2">&quot;age &gt; 3&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">where</span><span class="p">(</span><span class="s2">&quot;age = 2&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2048,7 +2048,7 @@ or a string of SQL expression.</td>
 <code class="descname">first</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/sql/dataframe.html#DataFrame.first"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.sql.DataFrame.first" title="Permalink to this definition">¶</a></dt>
 <dd><p>Returns the first row as a <a class="reference internal" href="#pyspark.sql.Row" title="pyspark.sql.Row"><code class="xref py py-class docutils literal"><span class="pre">Row</span></code></a>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">first</span><span class="p">()</span>
-<span class="go">Row(age=2, name=u&#39;Alice&#39;)</span>
+<span class="go">Row(age=2, name=&#39;Alice&#39;)</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2137,11 +2137,11 @@ Each element should be a column name (string) or an expression (<a class="refere
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">groupBy</span><span class="p">()</span><span class="o">.</span><span class="n">avg</span><span class="p">()</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[Row(avg(age)=3.5)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">groupBy</span><span class="p">(</span><span class="s1">&#39;name&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">agg</span><span class="p">({</span><span class="s1">&#39;age&#39;</span><span class="p">:</span> <span class="s1">&#39;mean&#39;</span><span class="p">})</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, avg(age)=2.0), Row(name=u&#39;Bob&#39;, avg(age)=5.0)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, avg(age)=2.0), Row(name=&#39;Bob&#39;, avg(age)=5.0)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">groupBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">)</span><span class="o">.</span><span class="n">avg</span><span class="p">()</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, avg(age)=2.0), Row(name=u&#39;Bob&#39;, avg(age)=5.0)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, avg(age)=2.0), Row(name=&#39;Bob&#39;, avg(age)=5.0)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">groupBy</span><span class="p">([</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="p">])</span><span class="o">.</span><span class="n">count</span><span class="p">()</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=2, count=1), Row(name=u&#39;Bob&#39;, age=5, count=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=2, count=1), Row(name=&#39;Bob&#39;, age=5, count=1)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2179,9 +2179,9 @@ If n is 1, return a single Row.</td>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">head</span><span class="p">()</span>
-<span class="go">Row(age=2, name=u&#39;Alice&#39;)</span>
+<span class="go">Row(age=2, name=&#39;Alice&#39;)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">head</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2284,24 +2284,24 @@ the column(s) must exist on both sides, and this performs an equi-join.</li>
 </table>
 <p>The following performs a full outer join between <code class="docutils literal"><span class="pre">df1</span></code> and <code class="docutils literal"><span class="pre">df2</span></code>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">name</span> <span class="o">==</span> <span class="n">df2</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="s1">&#39;outer&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">df2</span><span class="o">.</span><span class="n">height</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=None, height=80), Row(name=u&#39;Bob&#39;, height=85), Row(name=u&#39;Alice&#39;, height=None)]</span>
+<span class="go">[Row(name=None, height=80), Row(name=&#39;Bob&#39;, height=85), Row(name=&#39;Alice&#39;, height=None)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;outer&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;height&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Tom&#39;, height=80), Row(name=u&#39;Bob&#39;, height=85), Row(name=u&#39;Alice&#39;, height=None)]</span>
+<span class="go">[Row(name=&#39;Tom&#39;, height=80), Row(name=&#39;Bob&#39;, height=85), Row(name=&#39;Alice&#39;, height=None)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">cond</span> <span class="o">=</span> <span class="p">[</span><span class="n">df</span><span class="o">.</span><span class="n">name</span> <span class="o">==</span> <span class="n">df3</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">age</span> <span class="o">==</span> <span class="n">df3</span><span class="o">.</span><span class="n">age</span><span class="p">]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df3</span><span class="p">,</span> <span class="n">cond</span><span class="p">,</span> <span class="s1">&#39;outer&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">df3</span><span class="o">.</span><span class="n">age</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=2), Row(name=u&#39;Bob&#39;, age=5)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=2), Row(name=&#39;Bob&#39;, age=5)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df2</span><span class="p">,</span> <span class="s1">&#39;name&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">df2</span><span class="o">.</span><span class="n">height</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Bob&#39;, height=85)]</span>
+<span class="go">[Row(name=&#39;Bob&#39;, height=85)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">df4</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">])</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Bob&#39;, age=5)]</span>
+<span class="go">[Row(name=&#39;Bob&#39;, age=5)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2314,7 +2314,7 @@ the column(s) must exist on both sides, and this performs an equi-join.</li>
 <code class="descname">limit</code><span class="sig-paren">(</span><em>num</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/sql/dataframe.html#DataFrame.limit"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.sql.DataFrame.limit" title="Permalink to this definition">¶</a></dt>
 <dd><p>Limits the result count to the number specified.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">limit</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">limit</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[]</span>
 </pre></div>
@@ -2376,18 +2376,18 @@ If a list is specified, length of the list must equal length of the <cite>cols</
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">desc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="n">ascending</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">desc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.functions</span> <span class="k">import</span> <span class="o">*</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="n">asc</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">desc</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">),</span> <span class="s2">&quot;name&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">([</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="s2">&quot;name&quot;</span><span class="p">],</span> <span class="n">ascending</span><span class="o">=</span><span class="p">[</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2760,11 +2760,11 @@ in the current DataFrame.</td>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s1">&#39;*&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="s1">&#39;name&#39;</span><span class="p">,</span> <span class="s1">&#39;age&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=2), Row(name=u&#39;Bob&#39;, age=5)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=2), Row(name=&#39;Bob&#39;, age=5)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span> <span class="o">+</span> <span class="mi">10</span><span class="p">)</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s1">&#39;age&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, age=12), Row(name=u&#39;Bob&#39;, age=15)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, age=12), Row(name=&#39;Bob&#39;, age=15)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -2855,18 +2855,18 @@ If a list is specified, length of the list must equal length of the <cite>cols</
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">desc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="n">ascending</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">desc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql.functions</span> <span class="k">import</span> <span class="o">*</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="n">asc</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">desc</span><span class="p">(</span><span class="s2">&quot;age&quot;</span><span class="p">),</span> <span class="s2">&quot;name&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">orderBy</span><span class="p">([</span><span class="s2">&quot;age&quot;</span><span class="p">,</span> <span class="s2">&quot;name&quot;</span><span class="p">],</span> <span class="n">ascending</span><span class="o">=</span><span class="p">[</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;), Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;), Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3007,7 +3007,7 @@ guarantee about the backward compatibility of the schema of the resulting DataFr
 <code class="descname">take</code><span class="sig-paren">(</span><em>num</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/sql/dataframe.html#DataFrame.take"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.sql.DataFrame.take" title="Permalink to this definition">¶</a></dt>
 <dd><p>Returns the first <code class="docutils literal"><span class="pre">num</span></code> rows as a <code class="xref py py-class docutils literal"><span class="pre">list</span></code> of <a class="reference internal" href="#pyspark.sql.Row" title="pyspark.sql.Row"><code class="xref py py-class docutils literal"><span class="pre">Row</span></code></a>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">take</span><span class="p">(</span><span class="mi">2</span><span class="p">)</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3028,7 +3028,7 @@ guarantee about the backward compatibility of the schema of the resulting DataFr
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">toDF</span><span class="p">(</span><span class="s1">&#39;f1&#39;</span><span class="p">,</span> <span class="s1">&#39;f2&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(f1=2, f2=u&#39;Alice&#39;), Row(f1=5, f2=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(f1=2, f2=&#39;Alice&#39;), Row(f1=5, f2=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3039,7 +3039,7 @@ guarantee about the backward compatibility of the schema of the resulting DataFr
 <dd><p>Converts a <a class="reference internal" href="#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal"><span class="pre">DataFrame</span></code></a> into a <code class="xref py py-class docutils literal"><span class="pre">RDD</span></code> of string.</p>
 <p>Each row is turned into a JSON document as one element in the returned RDD.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">toJSON</span><span class="p">()</span><span class="o">.</span><span class="n">first</span><span class="p">()</span>
-<span class="go">u&#39;{&quot;age&quot;:2,&quot;name&quot;:&quot;Alice&quot;}&#39;</span>
+<span class="go">&#39;{&quot;age&quot;:2,&quot;name&quot;:&quot;Alice&quot;}&#39;</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3053,7 +3053,7 @@ guarantee about the backward compatibility of the schema of the resulting DataFr
 <dd><p>Returns an iterator that contains all of the rows in this <a class="reference internal" href="#pyspark.sql.DataFrame" title="pyspark.sql.DataFrame"><code class="xref py py-class docutils literal"><span class="pre">DataFrame</span></code></a>.
 The iterator will consume as much memory as the largest partition in this DataFrame.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="nb">list</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">toLocalIterator</span><span class="p">())</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;), Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;), Row(age=5, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3181,7 +3181,7 @@ a column from some other dataframe will raise an error.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">withColumn</span><span class="p">(</span><span class="s1">&#39;age2&#39;</span><span class="p">,</span> <span class="n">df</span><span class="o">.</span><span class="n">age</span> <span class="o">+</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;, age2=4), Row(age=5, name=u&#39;Bob&#39;, age2=7)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;, age2=4), Row(age=5, name=&#39;Bob&#39;, age2=7)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3207,7 +3207,7 @@ This is a no-op if schema doesn’t contain the given column name.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">withColumnRenamed</span><span class="p">(</span><span class="s1">&#39;age&#39;</span><span class="p">,</span> <span class="s1">&#39;age2&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age2=2, name=u&#39;Alice&#39;), Row(age2=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age2=2, name=&#39;Alice&#39;), Row(age2=5, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3334,12 +3334,12 @@ or a list of <a class="reference internal" href="#pyspark.sql.Column" title="pys
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">gdf</span> <span class="o">=</span> <span class="n">df</span><span class="o">.</span><span class="n">groupBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">gdf</span><span class="o">.</span><span class="n">agg</span><span class="p">({</span><span class="s2">&quot;*&quot;</span><span class="p">:</span> <span class="s2">&quot;count&quot;</span><span class="p">})</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, count(1)=1), Row(name=u&#39;Bob&#39;, count(1)=1)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, count(1)=1), Row(name=&#39;Bob&#39;, count(1)=1)]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">functions</span> <span class="k">as</span> <span class="n">F</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">gdf</span><span class="o">.</span><span class="n">agg</span><span class="p">(</span><span class="n">F</span><span class="o">.</span><span class="n">min</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[Row(name=u&#39;Alice&#39;, min(age)=2), Row(name=u&#39;Bob&#39;, min(age)=5)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;, min(age)=2), Row(name=&#39;Bob&#39;, min(age)=5)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3615,9 +3615,9 @@ corresponding :class: <cite>StructField</cite> (optional, keyword only argument)
 <code class="descname">asc</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pyspark.sql.Column.asc" title="Permalink to this definition">¶</a></dt>
 <dd><p>Returns a sort expression based on the ascending order of the given column name</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
-<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">)</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">asc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Alice&#39;), Row(name=u&#39;Tom&#39;)]</span>
+<span class="go">[Row(name=&#39;Alice&#39;), Row(name=&#39;Tom&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3718,9 +3718,9 @@ this <a class="reference internal" href="#pyspark.sql.Column" title="pyspark.sql
 <code class="descname">cast</code><span class="sig-paren">(</span><em>dataType</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/sql/column.html#Column.cast"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.sql.Column.cast" title="Permalink to this definition">¶</a></dt>
 <dd><p>Convert the column into type <code class="docutils literal"><span class="pre">dataType</span></code>.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">cast</span><span class="p">(</span><span class="s2">&quot;string&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s1">&#39;ages&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(ages=u&#39;2&#39;), Row(ages=u&#39;5&#39;)]</span>
+<span class="go">[Row(ages=&#39;2&#39;), Row(ages=&#39;5&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">cast</span><span class="p">(</span><span class="n">StringType</span><span class="p">())</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s1">&#39;ages&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(ages=u&#39;2&#39;), Row(ages=u&#39;5&#39;)]</span>
+<span class="go">[Row(ages=&#39;2&#39;), Row(ages=&#39;5&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3741,7 +3741,7 @@ this <a class="reference internal" href="#pyspark.sql.Column" title="pyspark.sql
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">contains</span><span class="p">(</span><span class="s1">&#39;o&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3751,9 +3751,9 @@ this <a class="reference internal" href="#pyspark.sql.Column" title="pyspark.sql
 <code class="descname">desc</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pyspark.sql.Column.desc" title="Permalink to this definition">¶</a></dt>
 <dd><p>Returns a sort expression based on the descending order of the given column name.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
-<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="p">)</span><span class="o">.</span><span class="n">orderBy</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">desc</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(name=u&#39;Tom&#39;), Row(name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(name=&#39;Tom&#39;), Row(name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3771,7 +3771,7 @@ this <a class="reference internal" href="#pyspark.sql.Column" title="pyspark.sql
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;ice&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;ice$&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[]</span>
 </pre></div>
@@ -3898,9 +3898,9 @@ or gets an item by key out of a dict.</p>
 <code class="descname">isNotNull</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pyspark.sql.Column.isNotNull" title="Permalink to this definition">¶</a></dt>
 <dd><p>True if the current expression is NOT null.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
-<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">height</span><span class="o">.</span><span class="n">isNotNull</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(height=80, name=u&#39;Tom&#39;)]</span>
+<span class="go">[Row(height=80, name=&#39;Tom&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3910,9 +3910,9 @@ or gets an item by key out of a dict.</p>
 <code class="descname">isNull</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#pyspark.sql.Column.isNull" title="Permalink to this definition">¶</a></dt>
 <dd><p>True if the current expression is null.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pyspark.sql</span> <span class="k">import</span> <span class="n">Row</span>
-<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="sa">u</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
+<span class="gp">&gt;&gt;&gt; </span><span class="n">df</span> <span class="o">=</span> <span class="n">spark</span><span class="o">.</span><span class="n">createDataFrame</span><span class="p">([</span><span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Tom&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="mi">80</span><span class="p">),</span> <span class="n">Row</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="s1">&#39;Alice&#39;</span><span class="p">,</span> <span class="n">height</span><span class="o">=</span><span class="kc">None</span><span class="p">)])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">height</span><span class="o">.</span><span class="n">isNull</span><span class="p">())</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(height=None, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(height=None, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -3923,9 +3923,9 @@ or gets an item by key out of a dict.</p>
 <dd><p>A boolean expression that is evaluated to true if the value of this
 expression is contained by the evaluated values of the arguments.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="p">[</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">isin</span><span class="p">(</span><span class="s2">&quot;Bob&quot;</span><span class="p">,</span> <span class="s2">&quot;Mike&quot;</span><span class="p">)]</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=5, name=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(age=5, name=&#39;Bob&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="p">[</span><span class="n">df</span><span class="o">.</span><span class="n">age</span><span class="o">.</span><span class="n">isin</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">3</span><span class="p">])]</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -3947,7 +3947,7 @@ expression is contained by the evaluated values of the arguments.</p>
 </table>
 <p>See <a class="reference internal" href="#pyspark.sql.Column.rlike" title="pyspark.sql.Column.rlike"><code class="xref py py-func docutils literal"><span class="pre">rlike()</span></code></a> for a regex version</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">like</span><span class="p">(</span><span class="s1">&#39;Al%&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -4029,7 +4029,7 @@ match.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">rlike</span><span class="p">(</span><span class="s1">&#39;ice$&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -4047,7 +4047,7 @@ match.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">&#39;Al&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(age=2, name=u&#39;Alice&#39;)]</span>
+<span class="go">[Row(age=2, name=&#39;Alice&#39;)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">filter</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">&#39;^Al&#39;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
 <span class="go">[]</span>
 </pre></div>
@@ -4071,7 +4071,7 @@ match.</p>
 </tbody>
 </table>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">df</span><span class="o">.</span><span class="n">select</span><span class="p">(</span><span class="n">df</span><span class="o">.</span><span class="n">name</span><span class="o">.</span><span class="n">substr</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">3</span><span class="p">)</span><span class="o">.</span><span class="n">alias</span><span class="p">(</span><span class="s2">&quot;col&quot;</span><span class="p">))</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[Row(col=u&#39;Ali&#39;), Row(col=u&#39;Bob&#39;)]</span>
+<span class="go">[Row(col=&#39;Ali&#39;), Row(col=&#39;Bob&#39;)]</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -4922,12 +4922,12 @@ any value greater than or equal to 9223372036854775807.</li>
 
 <dl class="attribute">
 <dt id="pyspark.sql.Window.unboundedFollowing">
-<code class="descname">unboundedFollowing</code><em class="property"> = 9223372036854775807L</em><a class="headerlink" href="#pyspark.sql.Window.unboundedFollowing" title="Permalink to this definition">¶</a></dt>
+<code class="descname">unboundedFollowing</code><em class="property"> = 9223372036854775807</em><a class="headerlink" href="#pyspark.sql.Window.unboundedFollowing" title="Permalink to this definition">¶</a></dt>
 <dd></dd></dl>
 
 <dl class="attribute">
 <dt id="pyspark.sql.Window.unboundedPreceding">
-<code class="descname">unboundedPreceding</code><em class="property"> = -9223372036854775808L</em><a class="headerlink" href="#pyspark.sql.Window.unboundedPreceding" title="Permalink to this definition">¶</a></dt>
+<code class="descname">unb

<TRUNCATED>



[7/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
Fix signature description broken in PySpark API documentation in 2.3.1


Project: http://git-wip-us.apache.org/repos/asf/spark-website/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark-website/commit/5660fb9a
Tree: http://git-wip-us.apache.org/repos/asf/spark-website/tree/5660fb9a
Diff: http://git-wip-us.apache.org/repos/asf/spark-website/diff/5660fb9a

Branch: refs/heads/asf-site
Commit: 5660fb9a42befd2e91ccb0111b7c43e386dfb261
Parents: 26b5271
Author: hyukjinkwon <gu...@apache.org>
Authored: Tue Jul 3 01:59:50 2018 +0800
Committer: hyukjinkwon <gu...@apache.org>
Committed: Wed Jul 4 12:41:32 2018 +0800

----------------------------------------------------------------------
 site/docs/2.3.1/api/python/_modules/index.html  |   8 +-
 .../python/_modules/pyspark/accumulators.html   |   8 +-
 .../api/python/_modules/pyspark/broadcast.html  |   8 +-
 .../2.3.1/api/python/_modules/pyspark/conf.html |   8 +-
 .../api/python/_modules/pyspark/context.html    |   8 +-
 .../api/python/_modules/pyspark/files.html      |   8 +-
 .../api/python/_modules/pyspark/ml/base.html    |   8 +-
 .../_modules/pyspark/ml/classification.html     |   8 +-
 .../python/_modules/pyspark/ml/clustering.html  |   8 +-
 .../python/_modules/pyspark/ml/evaluation.html  |   8 +-
 .../api/python/_modules/pyspark/ml/feature.html |   8 +-
 .../api/python/_modules/pyspark/ml/fpm.html     |   8 +-
 .../api/python/_modules/pyspark/ml/image.html   |   8 +-
 .../api/python/_modules/pyspark/ml/linalg.html  |   8 +-
 .../api/python/_modules/pyspark/ml/param.html   |   8 +-
 .../_modules/pyspark/ml/param/shared.html       |   8 +-
 .../python/_modules/pyspark/ml/pipeline.html    |   8 +-
 .../_modules/pyspark/ml/recommendation.html     |   8 +-
 .../python/_modules/pyspark/ml/regression.html  |   8 +-
 .../api/python/_modules/pyspark/ml/stat.html    |   8 +-
 .../api/python/_modules/pyspark/ml/tuning.html  |   8 +-
 .../api/python/_modules/pyspark/ml/util.html    |   8 +-
 .../api/python/_modules/pyspark/ml/wrapper.html |   8 +-
 .../_modules/pyspark/mllib/classification.html  |   8 +-
 .../_modules/pyspark/mllib/clustering.html      |   8 +-
 .../python/_modules/pyspark/mllib/common.html   |   8 +-
 .../_modules/pyspark/mllib/evaluation.html      |   8 +-
 .../python/_modules/pyspark/mllib/feature.html  |   8 +-
 .../api/python/_modules/pyspark/mllib/fpm.html  |   8 +-
 .../python/_modules/pyspark/mllib/linalg.html   |   8 +-
 .../pyspark/mllib/linalg/distributed.html       |   8 +-
 .../python/_modules/pyspark/mllib/random.html   |   8 +-
 .../_modules/pyspark/mllib/recommendation.html  |   8 +-
 .../_modules/pyspark/mllib/regression.html      |   8 +-
 .../pyspark/mllib/stat/KernelDensity.html       |   8 +-
 .../pyspark/mllib/stat/distribution.html        |   8 +-
 .../_modules/pyspark/mllib/stat/test.html       |   8 +-
 .../api/python/_modules/pyspark/mllib/tree.html |   8 +-
 .../api/python/_modules/pyspark/mllib/util.html |   8 +-
 .../api/python/_modules/pyspark/profiler.html   |   8 +-
 .../2.3.1/api/python/_modules/pyspark/rdd.html  |   8 +-
 .../python/_modules/pyspark/serializers.html    |   8 +-
 .../python/_modules/pyspark/sql/catalog.html    |   8 +-
 .../api/python/_modules/pyspark/sql/column.html |   8 +-
 .../python/_modules/pyspark/sql/context.html    |   8 +-
 .../python/_modules/pyspark/sql/dataframe.html  |   8 +-
 .../python/_modules/pyspark/sql/functions.html  |   8 +-
 .../api/python/_modules/pyspark/sql/group.html  |   8 +-
 .../python/_modules/pyspark/sql/readwriter.html |   8 +-
 .../python/_modules/pyspark/sql/session.html    |   8 +-
 .../python/_modules/pyspark/sql/streaming.html  |   8 +-
 .../api/python/_modules/pyspark/sql/types.html  |   8 +-
 .../api/python/_modules/pyspark/sql/udf.html    |   8 +-
 .../api/python/_modules/pyspark/sql/window.html |   8 +-
 .../api/python/_modules/pyspark/status.html     |   8 +-
 .../python/_modules/pyspark/storagelevel.html   |   8 +-
 .../_modules/pyspark/streaming/context.html     |   8 +-
 .../_modules/pyspark/streaming/dstream.html     |   8 +-
 .../_modules/pyspark/streaming/flume.html       |   8 +-
 .../_modules/pyspark/streaming/kafka.html       |   8 +-
 .../_modules/pyspark/streaming/kinesis.html     |   8 +-
 .../_modules/pyspark/streaming/listener.html    |   8 +-
 .../python/_modules/pyspark/taskcontext.html    |   8 +-
 site/docs/2.3.1/api/python/index.html           |   8 +-
 site/docs/2.3.1/api/python/pyspark.html         |  30 +-
 site/docs/2.3.1/api/python/pyspark.ml.html      | 178 +++++------
 site/docs/2.3.1/api/python/pyspark.mllib.html   |  36 +--
 site/docs/2.3.1/api/python/pyspark.sql.html     | 300 +++++++++----------
 .../2.3.1/api/python/pyspark.streaming.html     |  11 +-
 site/docs/2.3.1/api/python/search.html          |   8 +-
 site/docs/2.3.1/api/python/searchindex.js       |   2 +-
 71 files changed, 539 insertions(+), 538 deletions(-)
----------------------------------------------------------------------
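
For readers skimming the hunks rather than reading every span: apart from the
signature fix itself, the visible text changes in the regenerated pages are
mechanical. Sample outputs lose the Python 2-style u'' prefix and the trailing
L on long literals (e.g. Window.unboundedFollowing), and the version strings
move from "master" to "2.3.1". As background on what that notation means, here
is a stand-alone illustration (not part of the commit; the variable names are
made up):

    # Illustration of the repr notation seen in the hunks: Python 2 reprs
    # unicode text as u'...' and arbitrary-precision integers with a trailing
    # L, while Python 3 reprs the same values as plain '...' and a bare int.
    name = u'Alice'        # the u prefix is accepted but redundant on Python 3
    bound = (1 << 63) - 1  # 9223372036854775807, cf. Window.unboundedFollowing

    print(repr(name))      # Python 3: 'Alice'              (Python 2: u'Alice')
    print(repr(bound))     # Python 3: 9223372036854775807  (Python 2: 9223372036854775807L)

The documented behaviour is unchanged; only the rendering of the example values
and the version strings differs.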


http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/index.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/index.html b/site/docs/2.3.1/api/python/_modules/index.html
index b3b9b16..430008f 100644
--- a/site/docs/2.3.1/api/python/_modules/index.html
+++ b/site/docs/2.3.1/api/python/_modules/index.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>Overview: module code &#8212; PySpark master documentation</title>
+    <title>Overview: module code &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>  
@@ -133,7 +133,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>
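
The "PySpark master documentation" to "PySpark 2.3.1 documentation" and
VERSION: 'master' to '2.3.1' churn in the hunks above, repeated in every page
diff that follows, comes from the Sphinx project metadata used at build time,
not from hand edits. A minimal sketch of the generic conf.py settings involved
(illustrative only; how Spark's own python/docs/conf.py fills these in is
outside this diff):

    # conf.py (generic Sphinx settings; not a copy of Spark's build config)
    project = 'PySpark'
    version = '2.3.1'   # short version string
    release = '2.3.1'   # full version; Sphinx uses it for the VERSION field in
                        # DOCUMENTATION_OPTIONS and for the default html_title,
                        # "<project> <release> documentation"

With these set per release, a rebuild rewrites the <title>, the navigation
links and DOCUMENTATION_OPTIONS consistently, which is exactly the pattern in
the remaining hunks.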

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/accumulators.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/accumulators.html b/site/docs/2.3.1/api/python/_modules/pyspark/accumulators.html
index 617ead8..164a7a0 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/accumulators.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/accumulators.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.accumulators &#8212; PySpark master documentation</title>
+    <title>pyspark.accumulators &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -340,7 +340,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/broadcast.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/broadcast.html b/site/docs/2.3.1/api/python/_modules/pyspark/broadcast.html
index d4d1626..1838e6a 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/broadcast.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/broadcast.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.broadcast &#8212; PySpark master documentation</title>
+    <title>pyspark.broadcast &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -236,7 +236,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/conf.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/conf.html b/site/docs/2.3.1/api/python/_modules/pyspark/conf.html
index 51aaf60..0dc229e 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/conf.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/conf.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.conf &#8212; PySpark master documentation</title>
+    <title>pyspark.conf &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -295,7 +295,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/context.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/context.html b/site/docs/2.3.1/api/python/_modules/pyspark/context.html
index b437d61..310f197 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/context.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/context.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.context &#8212; PySpark master documentation</title>
+    <title>pyspark.context &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1125,7 +1125,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/files.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/files.html b/site/docs/2.3.1/api/python/_modules/pyspark/files.html
index 88ed0fd..9913310 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/files.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/files.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.files &#8212; PySpark master documentation</title>
+    <title>pyspark.files &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -130,7 +130,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/base.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/base.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/base.html
index 431d366..b696bfe 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/base.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/base.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.base &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.base &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -308,7 +308,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/classification.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/classification.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/classification.html
index d04a67b..0cffd61 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/classification.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/classification.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.classification &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.classification &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -2117,7 +2117,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/clustering.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/clustering.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/clustering.html
index 65b93ae..04bb10b 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/clustering.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/clustering.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.clustering &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.clustering &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1233,7 +1233,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/evaluation.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/evaluation.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/evaluation.html
index 4468186..718f663 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/evaluation.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/evaluation.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.evaluation &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.evaluation &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -502,7 +502,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/feature.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/feature.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/feature.html
index 368f2ba..9ec6d5c 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/feature.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/feature.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.feature &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.feature &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -3791,7 +3791,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/fpm.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/fpm.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/fpm.html
index 953504b..f270615 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/fpm.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/fpm.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.fpm &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.fpm &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -316,7 +316,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/image.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/image.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/image.html
index 0189279..6fbf596 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/image.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/image.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.image &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.image &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -311,7 +311,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/linalg.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/linalg.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/linalg.html
index b382845..daba103 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/linalg.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/linalg.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.linalg &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.linalg &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1235,7 +1235,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/param.html
index 9429b74..545d220 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/param.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.param &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.param &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -582,7 +582,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param/shared.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param/shared.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/param/shared.html
index 432944e..6ab4f59 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/param/shared.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/param/shared.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.param.shared &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.param.shared &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li>
           <li class="nav-item nav-item-2"><a href="../param.html" accesskey="U">pyspark.ml.param</a> &#187;</li> 
@@ -840,7 +840,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li>
           <li class="nav-item nav-item-2"><a href="../param.html" >pyspark.ml.param</a> &#187;</li> 

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/pipeline.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/pipeline.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/pipeline.html
index 3005ec8..f3d5c63 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/pipeline.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/pipeline.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.pipeline &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.pipeline &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -461,7 +461,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/recommendation.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/recommendation.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/recommendation.html
index 685de3b..d345c32 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/recommendation.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/recommendation.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.recommendation &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.recommendation &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -554,7 +554,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/regression.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/regression.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/regression.html
index 4d24002..759ad6d 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/regression.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/regression.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.regression &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.regression &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1886,7 +1886,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/stat.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/stat.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/stat.html
index 825f14b..c60f9ba 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/stat.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/stat.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.stat &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.stat &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -225,7 +225,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/tuning.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/tuning.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/tuning.html
index 49c0cc9..91a78b1 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/tuning.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/tuning.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.tuning &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.tuning &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -801,7 +801,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/util.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/util.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/util.html
index 70a72a7..a42dd8e 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/util.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/util.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.util &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.util &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -608,7 +608,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/ml/wrapper.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/ml/wrapper.html b/site/docs/2.3.1/api/python/_modules/pyspark/ml/wrapper.html
index c91e2b7..6a83091 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/ml/wrapper.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/ml/wrapper.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.ml.wrapper &#8212; PySpark master documentation</title>
+    <title>pyspark.ml.wrapper &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -411,7 +411,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/classification.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/classification.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/classification.html
index e703c25..dfd6ee3 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/classification.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/classification.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.classification &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.classification &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -838,7 +838,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/clustering.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/clustering.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/clustering.html
index 960d489..8373529 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/clustering.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/clustering.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.clustering &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.clustering &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1126,7 +1126,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/common.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/common.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/common.html
index e93d11c..932207d 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/common.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/common.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.common &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.common &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -234,7 +234,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/evaluation.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/evaluation.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/evaluation.html
index 89af7ea..86b4154 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/evaluation.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/evaluation.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.evaluation &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.evaluation &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -620,7 +620,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/feature.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/feature.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/feature.html
index 8a77a08..4fa7e56 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/feature.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/feature.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.feature &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.feature &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -897,7 +897,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/fpm.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/fpm.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/fpm.html
index 7d4da2c..116e7b1 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/fpm.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/fpm.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.fpm &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.fpm &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -275,7 +275,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg.html
index daf9f09..4066c59 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.linalg &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.linalg &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1447,7 +1447,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg/distributed.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg/distributed.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg/distributed.html
index 3f170b7..1ce6c63 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg/distributed.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/linalg/distributed.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.linalg.distributed &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.linalg.distributed &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li>
           <li class="nav-item nav-item-2"><a href="../linalg.html" accesskey="U">pyspark.mllib.linalg</a> &#187;</li> 
@@ -1455,7 +1455,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li>
           <li class="nav-item nav-item-2"><a href="../linalg.html" >pyspark.mllib.linalg</a> &#187;</li> 

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/random.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/random.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/random.html
index 372ba06..c9ad4d3 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/random.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/random.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.random &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.random &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -499,7 +499,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/recommendation.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/recommendation.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/recommendation.html
index 0ec83b9..09496d1 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/recommendation.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/recommendation.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.recommendation &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.recommendation &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -404,7 +404,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/regression.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/regression.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/regression.html
index 8d1a19d..b82982c 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/regression.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/regression.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.regression &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.regression &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -914,7 +914,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/KernelDensity.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/KernelDensity.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/KernelDensity.html
index 4a11d0b..df18141 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/KernelDensity.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/KernelDensity.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.stat.KernelDensity &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.stat.KernelDensity &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -130,7 +130,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/distribution.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/distribution.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/distribution.html
index 1669d60..2ef89f8 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/distribution.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/distribution.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.stat.distribution &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.stat.distribution &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -103,7 +103,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/test.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/test.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/test.html
index bd98275..26775af 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/test.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/stat/test.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.stat.test &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.stat.test &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -153,7 +153,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/tree.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/tree.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/tree.html
index 281ed50..c0487b7 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/tree.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/tree.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.tree &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.tree &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -731,7 +731,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/util.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/util.html b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/util.html
index 3cdc8fb..15309f8 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/mllib/util.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/mllib/util.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib.util &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib.util &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -599,7 +599,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>
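
The 'master' -> '2.3.1' replacements in the hunks above (page titles and DOCUMENTATION_OPTIONS.VERSION) come from Sphinx's version settings rather than from the templates themselves. A minimal sketch of the two settings involved, assuming PySpark's docs conf.py (the file path and everything around these lines is an assumption):

    # conf.py -- hypothetical excerpt; only these two settings matter here
    version = '2.3.1'   # short version string
    release = '2.3.1'   # Sphinx uses this for the default
                        # "PySpark <release> documentation" title and for
                        # DOCUMENTATION_OPTIONS.VERSION in the generated pages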




[4/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/pyspark.mllib.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/pyspark.mllib.html b/site/docs/2.3.1/api/python/pyspark.mllib.html
index c449f16..662b562 100644
--- a/site/docs/2.3.1/api/python/pyspark.mllib.html
+++ b/site/docs/2.3.1/api/python/pyspark.mllib.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.mllib package &#8212; PySpark master documentation</title>
+    <title>pyspark.mllib package &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -35,7 +35,7 @@
           <a href="pyspark.ml.html" title="pyspark.ml package"
              accesskey="P">previous</a></li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" accesskey="U">pyspark package</a> &#187;</li> 
       </ul>
@@ -2633,7 +2633,7 @@ Compositionality.</p>
 <p>Querying for synonyms of a word will not return that word:</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">syms</span> <span class="o">=</span> <span class="n">model</span><span class="o">.</span><span class="n">findSynonyms</span><span class="p">(</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="p">[</span><span class="n">s</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">syms</span><span class="p">]</span>
-<span class="go">[u&#39;b&#39;, u&#39;c&#39;]</span>
+<span class="go">[&#39;b&#39;, &#39;c&#39;]</span>
 </pre></div>
 </div>
 <p>But querying for synonyms of a vector may return the word whose
@@ -2641,7 +2641,7 @@ representation is that vector:</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">vec</span> <span class="o">=</span> <span class="n">model</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="s2">&quot;a&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">syms</span> <span class="o">=</span> <span class="n">model</span><span class="o">.</span><span class="n">findSynonyms</span><span class="p">(</span><span class="n">vec</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="p">[</span><span class="n">s</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">syms</span><span class="p">]</span>
-<span class="go">[u&#39;a&#39;, u&#39;b&#39;]</span>
+<span class="go">[&#39;a&#39;, &#39;b&#39;]</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">os</span><span class="o">,</span> <span class="nn">tempfile</span>
@@ -2652,7 +2652,7 @@ representation is that vector:</p>
 <span class="go">True</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">syms</span> <span class="o">=</span> <span class="n">sameModel</span><span class="o">.</span><span class="n">findSynonyms</span><span class="p">(</span><span class="s2">&quot;a&quot;</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="p">[</span><span class="n">s</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">syms</span><span class="p">]</span>
-<span class="go">[u&#39;b&#39;, u&#39;c&#39;]</span>
+<span class="go">[&#39;b&#39;, &#39;c&#39;]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">shutil</span> <span class="k">import</span> <span class="n">rmtree</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="k">try</span><span class="p">:</span>
 <span class="gp">... </span>    <span class="n">rmtree</span><span class="p">(</span><span class="n">path</span><span class="p">)</span>
@@ -3073,7 +3073,7 @@ using the Parallel FP-Growth algorithm.</p>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">FPGrowth</span><span class="o">.</span><span class="n">train</span><span class="p">(</span><span class="n">rdd</span><span class="p">,</span> <span class="mf">0.6</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">model</span><span class="o">.</span><span class="n">freqItemsets</span><span class="p">()</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[FreqItemset(items=[u&#39;a&#39;], freq=4), FreqItemset(items=[u&#39;c&#39;], freq=3), ...</span>
+<span class="go">[FreqItemset(items=[&#39;a&#39;], freq=4), FreqItemset(items=[&#39;c&#39;], freq=3), ...</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model_path</span> <span class="o">=</span> <span class="n">temp_path</span> <span class="o">+</span> <span class="s2">&quot;/fpm&quot;</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model</span><span class="o">.</span><span class="n">save</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">model_path</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sameModel</span> <span class="o">=</span> <span class="n">FPGrowthModel</span><span class="o">.</span><span class="n">load</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="n">model_path</span><span class="p">)</span>
@@ -3171,7 +3171,7 @@ another iteration of distributed prefix growth is run.
 <span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">PrefixSpan</span><span class="o">.</span><span class="n">train</span><span class="p">(</span><span class="n">rdd</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">model</span><span class="o">.</span><span class="n">freqSequences</span><span class="p">()</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[FreqSequence(sequence=[[u&#39;a&#39;]], freq=3), FreqSequence(sequence=[[u&#39;a&#39;], [u&#39;a&#39;]], freq=1), ...</span>
+<span class="go">[FreqSequence(sequence=[[&#39;a&#39;]], freq=3), FreqSequence(sequence=[[&#39;a&#39;], [&#39;a&#39;]], freq=1), ...</span>
 </pre></div>
 </div>
 <div class="versionadded">
@@ -5178,7 +5178,7 @@ distribution with the input mean.</p>
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.exponentialVectorRDD">
-<em class="property">static </em><code class="descname">exponentialVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.exponentialVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.exponentialVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">exponentialVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>mean</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.exponentialVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.exponentialVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the Exponential distribution with the input mean.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -5264,7 +5264,7 @@ distribution with the input shape and scale.</p>
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.gammaVectorRDD">
-<em class="property">static </em><code class="descname">gammaVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.gammaVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.gammaVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">gammaVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>shape</em>, <em>scale</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.gammaVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.gammaVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the Gamma distribution.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -5354,7 +5354,7 @@ distribution with the input mean and standard distribution.</p>
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.logNormalVectorRDD">
-<em class="property">static </em><code class="descname">logNormalVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.logNormalVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.logNormalVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">logNormalVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>mean</em>, <em>std</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.logNormalVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.logNormalVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the log normal distribution.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -5440,7 +5440,7 @@ to some other normal N(mean, sigma^2), use
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.normalVectorRDD">
-<em class="property">static </em><code class="descname">normalVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.normalVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.normalVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">normalVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.normalVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.normalVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the standard normal distribution.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -5518,7 +5518,7 @@ distribution with the input mean.</p>
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.poissonVectorRDD">
-<em class="property">static </em><code class="descname">poissonVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.poissonVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.poissonVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">poissonVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>mean</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.poissonVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.poissonVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the Poisson distribution with the input mean.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -5602,7 +5602,7 @@ to U(a, b), use
 
 <dl class="staticmethod">
 <dt id="pyspark.mllib.random.RandomRDDs.uniformVectorRDD">
-<em class="property">static </em><code class="descname">uniformVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>*a</em>, <em>**kw</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.uniformVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.uniformVectorRDD" title="Permalink to this definition">¶</a></dt>
+<em class="property">static </em><code class="descname">uniformVectorRDD</code><span class="sig-paren">(</span><em>sc</em>, <em>numRows</em>, <em>numCols</em>, <em>numPartitions=None</em>, <em>seed=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/mllib/random.html#RandomRDDs.uniformVectorRDD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.mllib.random.RandomRDDs.uniformVectorRDD" title="Permalink to this definition">¶</a></dt>
 <dd><p>Generates an RDD comprised of vectors containing i.i.d. samples drawn
 from the uniform distribution U(0.0, 1.0).</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -6873,9 +6873,9 @@ of freedom, p-value, the method used, and the null hypothesis.</p>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="nb">round</span><span class="p">(</span><span class="n">pearson</span><span class="o">.</span><span class="n">pValue</span><span class="p">,</span> <span class="mi">4</span><span class="p">))</span>
 <span class="go">0.8187</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">pearson</span><span class="o">.</span><span class="n">method</span>
-<span class="go">u&#39;pearson&#39;</span>
+<span class="go">&#39;pearson&#39;</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">pearson</span><span class="o">.</span><span class="n">nullHypothesis</span>
-<span class="go">u&#39;observed follows the same distribution as expected.&#39;</span>
+<span class="go">&#39;observed follows the same distribution as expected.&#39;</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">observed</span> <span class="o">=</span> <span class="n">Vectors</span><span class="o">.</span><span class="n">dense</span><span class="p">([</span><span class="mi">21</span><span class="p">,</span> <span class="mi">38</span><span class="p">,</span> <span class="mi">43</span><span class="p">,</span> <span class="mi">80</span><span class="p">])</span>
@@ -7055,7 +7055,7 @@ the method used, and the null hypothesis.</p>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="nb">round</span><span class="p">(</span><span class="n">ksmodel</span><span class="o">.</span><span class="n">statistic</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>
 <span class="go">0.175</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">ksmodel</span><span class="o">.</span><span class="n">nullHypothesis</span>
-<span class="go">u&#39;Sample follows theoretical distribution&#39;</span>
+<span class="go">&#39;Sample follows theoretical distribution&#39;</span>
 </pre></div>
 </div>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">data</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">([</span><span class="mf">2.0</span><span class="p">,</span> <span class="mf">3.0</span><span class="p">,</span> <span class="mf">4.0</span><span class="p">])</span>
@@ -8453,7 +8453,7 @@ this method throws an exception.</li>
           <a href="pyspark.ml.html" title="pyspark.ml package"
              >previous</a></li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="pyspark.html" >pyspark package</a> &#187;</li> 
       </ul>
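
The hunks above restore the real signatures of the RandomRDDs vector generators (previously rendered as (*a, **kw) because of their decorator wrappers) and regenerate the doctest outputs under Python 3, which drops the u'...' string prefixes. A minimal usage sketch against the corrected normalVectorRDD signature, assuming an active SparkContext named sc:

    from pyspark.mllib.random import RandomRDDs

    # 100 vectors of 3 i.i.d. N(0, 1) samples each; fixed seed for reproducibility
    rdd = RandomRDDs.normalVectorRDD(sc, numRows=100, numCols=3, seed=42)
    print(rdd.first())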




[6/7] spark-website git commit: Fix signature description broken in PySpark API documentation in 2.3.1

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/profiler.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/profiler.html b/site/docs/2.3.1/api/python/_modules/pyspark/profiler.html
index b7ac6ff..84aa845 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/profiler.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/profiler.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.profiler &#8212; PySpark master documentation</title>
+    <title>pyspark.profiler &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -247,7 +247,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/rdd.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/rdd.html b/site/docs/2.3.1/api/python/_modules/pyspark/rdd.html
index fe64975..219f3bb 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/rdd.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/rdd.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.rdd &#8212; PySpark master documentation</title>
+    <title>pyspark.rdd &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -2594,7 +2594,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/serializers.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/serializers.html b/site/docs/2.3.1/api/python/_modules/pyspark/serializers.html
index 6274335..ab640d8 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/serializers.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/serializers.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.serializers &#8212; PySpark master documentation</title>
+    <title>pyspark.serializers &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -773,7 +773,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/catalog.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/catalog.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/catalog.html
index 6a5848b..f80af48 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/catalog.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/catalog.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.catalog &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.catalog &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -383,7 +383,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/column.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/column.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/column.html
index 3aff839..03a85bd 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/column.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/column.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.column &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.column &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -738,7 +738,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/context.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/context.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/context.html
index 279a1ed..73ef846 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/context.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/context.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.context &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.context &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -621,7 +621,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/dataframe.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/dataframe.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/dataframe.html
index 69f62aa..3f026c1 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/dataframe.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/dataframe.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.dataframe &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.dataframe &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -2224,7 +2224,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/functions.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/functions.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/functions.html
index 1948ca9..12aa366 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/functions.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/functions.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.functions &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.functions &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -2408,7 +2408,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/group.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/group.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/group.html
index 4f66fcf..a75d488 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/group.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/group.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.group &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.group &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -356,7 +356,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/readwriter.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/readwriter.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/readwriter.html
index cc86358..3ea0d36 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/readwriter.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/readwriter.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.readwriter &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.readwriter &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1048,7 +1048,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/session.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/session.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/session.html
index f953e90..fe24005 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/session.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/session.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.session &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.session &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -883,7 +883,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/streaming.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/streaming.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/streaming.html
index ac98459..13967f3 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/streaming.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/streaming.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.streaming &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.streaming &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1008,7 +1008,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/types.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/types.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/types.html
index 38e1a7a..70a212f 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/types.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/types.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.types &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.types &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -1937,7 +1937,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/udf.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/udf.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/udf.html
index d460e1d..c23dd18 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/udf.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/udf.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.udf &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.udf &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -473,7 +473,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/sql/window.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/sql/window.html b/site/docs/2.3.1/api/python/_modules/pyspark/sql/window.html
index 0ac53a5..f32abae 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/sql/window.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/sql/window.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.sql.window &#8212; PySpark master documentation</title>
+    <title>pyspark.sql.window &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -308,7 +308,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/status.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/status.html b/site/docs/2.3.1/api/python/_modules/pyspark/status.html
index 6cc9689..a86cce3 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/status.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/status.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.status &#8212; PySpark master documentation</title>
+    <title>pyspark.status &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -167,7 +167,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/storagelevel.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/storagelevel.html b/site/docs/2.3.1/api/python/_modules/pyspark/storagelevel.html
index 28ebecc..51ea24c 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/storagelevel.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/storagelevel.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.storagelevel &#8212; PySpark master documentation</title>
+    <title>pyspark.storagelevel &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -142,7 +142,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/context.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/context.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/context.html
index 4d3050b..06b4b6c 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/context.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/context.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.context &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.context &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -441,7 +441,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/dstream.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/dstream.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/dstream.html
index 92deed3..9960287 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/dstream.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/dstream.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.dstream &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.dstream &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -716,7 +716,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/flume.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/flume.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/flume.html
index ab992cf..1618b90 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/flume.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/flume.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.flume &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.flume &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -227,7 +227,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kafka.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kafka.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kafka.html
index 796f500..e40c48f 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kafka.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kafka.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.kafka &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.kafka &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -577,7 +577,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kinesis.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kinesis.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kinesis.html
index e4dad1f..c01b912 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kinesis.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/kinesis.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.kinesis &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.kinesis &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -191,7 +191,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/listener.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/listener.html b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/listener.html
index 3dc1841..5f28ea9 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/streaming/listener.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/streaming/listener.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.streaming.listener &#8212; PySpark master documentation</title>
+    <title>pyspark.streaming.listener &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -152,7 +152,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/_modules/pyspark/taskcontext.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/_modules/pyspark/taskcontext.html b/site/docs/2.3.1/api/python/_modules/pyspark/taskcontext.html
index 5d7121a..f729864 100644
--- a/site/docs/2.3.1/api/python/_modules/pyspark/taskcontext.html
+++ b/site/docs/2.3.1/api/python/_modules/pyspark/taskcontext.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark.taskcontext &#8212; PySpark master documentation</title>
+    <title>pyspark.taskcontext &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="../../_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="../../_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    '../../',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -31,7 +31,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" accesskey="U">Module code</a> &#187;</li> 
       </ul>
@@ -161,7 +161,7 @@
       <h3>Navigation</h3>
       <ul>
     
-        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="../../index.html">PySpark 2.3.1 documentation</a> &#187;</li>
 
           <li class="nav-item nav-item-1"><a href="../index.html" >Module code</a> &#187;</li> 
       </ul>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/index.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/index.html b/site/docs/2.3.1/api/python/index.html
index 3318e83..05f21e0 100644
--- a/site/docs/2.3.1/api/python/index.html
+++ b/site/docs/2.3.1/api/python/index.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>Welcome to Spark Python API Docs! &#8212; PySpark master documentation</title>
+    <title>Welcome to Spark Python API Docs! &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -35,7 +35,7 @@
           <a href="pyspark.html" title="pyspark package"
              accesskey="N">next</a></li>
     
-        <li class="nav-item nav-item-0"><a href="#">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="#">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>  
@@ -178,7 +178,7 @@
           <a href="pyspark.html" title="pyspark package"
              >next</a></li>
     
-        <li class="nav-item nav-item-0"><a href="#">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="#">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/5660fb9a/site/docs/2.3.1/api/python/pyspark.html
----------------------------------------------------------------------
diff --git a/site/docs/2.3.1/api/python/pyspark.html b/site/docs/2.3.1/api/python/pyspark.html
index 6a6a043..c2b1dd7 100644
--- a/site/docs/2.3.1/api/python/pyspark.html
+++ b/site/docs/2.3.1/api/python/pyspark.html
@@ -5,14 +5,14 @@
 <html xmlns="http://www.w3.org/1999/xhtml">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
-    <title>pyspark package &#8212; PySpark master documentation</title>
+    <title>pyspark package &#8212; PySpark 2.3.1 documentation</title>
     <link rel="stylesheet" href="_static/nature.css" type="text/css" />
     <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
     <link rel="stylesheet" href="_static/pyspark.css" type="text/css" />
     <script type="text/javascript">
       var DOCUMENTATION_OPTIONS = {
         URL_ROOT:    './',
-        VERSION:     'master',
+        VERSION:     '2.3.1',
         COLLAPSE_INDEX: false,
         FILE_SUFFIX: '.html',
         HAS_SOURCE:  true,
@@ -39,7 +39,7 @@
           <a href="index.html" title="Welcome to Spark Python API Docs!"
              accesskey="P">previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>  
@@ -273,7 +273,7 @@ Its format depends on the scheduler implementation.</p>
 <li>in case of YARN something like ‘application_1433865536131_34483’</li>
 </ul>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sc</span><span class="o">.</span><span class="n">applicationId</span>  
-<span class="go">u&#39;local-...&#39;</span>
+<span class="go">&#39;local-...&#39;</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -758,7 +758,7 @@ Spark 1.2)</p>
 <span class="gp">... </span>   <span class="n">_</span> <span class="o">=</span> <span class="n">testFile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="s2">&quot;Hello world!&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">textFile</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">textFile</span><span class="p">(</span><span class="n">path</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">textFile</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[u&#39;Hello world!&#39;]</span>
+<span class="go">[&#39;Hello world!&#39;]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -781,10 +781,10 @@ serializer:</p>
 <span class="gp">... </span>   <span class="n">_</span> <span class="o">=</span> <span class="n">testFile</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="s2">&quot;Hello&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">textFile</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">textFile</span><span class="p">(</span><span class="n">path</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">textFile</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[u&#39;Hello&#39;]</span>
+<span class="go">[&#39;Hello&#39;]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">parallelized</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">([</span><span class="s2">&quot;World!&quot;</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">sc</span><span class="o">.</span><span class="n">union</span><span class="p">([</span><span class="n">textFile</span><span class="p">,</span> <span class="n">parallelized</span><span class="p">])</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[u&#39;Hello&#39;, &#39;World!&#39;]</span>
+<span class="go">[&#39;Hello&#39;, &#39;World!&#39;]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -834,7 +834,7 @@ fully in memory.</p>
 <span class="gp">... </span>   <span class="n">_</span> <span class="o">=</span> <span class="n">file2</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="s2">&quot;2&quot;</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">textFiles</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">wholeTextFiles</span><span class="p">(</span><span class="n">dirPath</span><span class="p">)</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="nb">sorted</span><span class="p">(</span><span class="n">textFiles</span><span class="o">.</span><span class="n">collect</span><span class="p">())</span>
-<span class="go">[(u&#39;.../1.txt&#39;, u&#39;1&#39;), (u&#39;.../2.txt&#39;, u&#39;2&#39;)]</span>
+<span class="go">[(&#39;.../1.txt&#39;, &#39;1&#39;), (&#39;.../2.txt&#39;, &#39;2&#39;)]</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -1699,7 +1699,7 @@ If no storage level is specified defaults to (<code class="xref py py-class docu
 <code class="descname">pipe</code><span class="sig-paren">(</span><em>command</em>, <em>env=None</em>, <em>checkCode=False</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/rdd.html#RDD.pipe"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.RDD.pipe" title="Permalink to this definition">¶</a></dt>
 <dd><p>Return an RDD created by piping elements to a forked external process.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">([</span><span class="s1">&#39;1&#39;</span><span class="p">,</span> <span class="s1">&#39;2&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">,</span> <span class="s1">&#39;3&#39;</span><span class="p">])</span><span class="o">.</span><span class="n">pipe</span><span class="p">(</span><span class="s1">&#39;cat&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">collect</span><span class="p">()</span>
-<span class="go">[u&#39;1&#39;, u&#39;2&#39;, u&#39;&#39;, u&#39;3&#39;]</span>
+<span class="go">[&#39;1&#39;, &#39;2&#39;, &#39;&#39;, &#39;3&#39;]</span>
 </pre></div>
 </div>
 <table class="docutils field-list" frame="void" rules="none">
@@ -1814,7 +1814,7 @@ using <cite>coalesce</cite>, which can avoid performing a shuffle.</p>
 
 <dl class="method">
 <dt id="pyspark.RDD.repartitionAndSortWithinPartitions">
-<code class="descname">repartitionAndSortWithinPartitions</code><span class="sig-paren">(</span><em>numPartitions=None</em>, <em>partitionFunc=&lt;function portable_hash&gt;</em>, <em>ascending=True</em>, <em>keyfunc=&lt;function &lt;lambda&gt;&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/rdd.html#RDD.repartitionAndSortWithinPartitions"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.RDD.repartitionAndSortWithinPartitions" title="Permalink to this definition">¶</a></dt>
+<code class="descname">repartitionAndSortWithinPartitions</code><span class="sig-paren">(</span><em>numPartitions=None</em>, <em>partitionFunc=&lt;function portable_hash&gt;</em>, <em>ascending=True</em>, <em>keyfunc=&lt;function RDD.&lt;lambda&gt;&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/rdd.html#RDD.repartitionAndSortWithinPartitions"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.RDD.repartitionAndSortWithinPartitions" title="Permalink to this definition">¶</a></dt>
 <dd><p>Repartition the RDD according to the given partitioner and, within each resulting partition,
 sort records by their keys.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">rdd</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">([(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">5</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">8</span><span class="p">),</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">6</span><span class="p">),</span> <span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">8</span><span class="p">),</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">8</span><span class="p">),</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">3</span><span class="p">)])</span>
@@ -2104,7 +2104,7 @@ RDD’s key and value types. The mechanism is as follows:</p>
 <span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">fileinput</span> <span class="k">import</span> <span class="nb">input</span><span class="p">,</span> <span class="n">hook_compressed</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">result</span> <span class="o">=</span> <span class="nb">sorted</span><span class="p">(</span><span class="nb">input</span><span class="p">(</span><span class="n">glob</span><span class="p">(</span><span class="n">tempFile3</span><span class="o">.</span><span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;/part*.gz&quot;</span><span class="p">),</span> <span class="n">openhook</span><span class="o">=</span><span class="n">hook_compressed</span><span class="p">))</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="sa">b</span><span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">result</span><span class="p">)</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">&#39;utf-8&#39;</span><span class="p">)</span>
-<span class="go">u&#39;bar\nfoo\n&#39;</span>
+<span class="go">&#39;bar\nfoo\n&#39;</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -2115,7 +2115,7 @@ RDD’s key and value types. The mechanism is as follows:</p>
 <dd><p>Assign a name to this RDD.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">rdd1</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">])</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">rdd1</span><span class="o">.</span><span class="n">setName</span><span class="p">(</span><span class="s1">&#39;RDD1&#39;</span><span class="p">)</span><span class="o">.</span><span class="n">name</span><span class="p">()</span>
-<span class="go">u&#39;RDD1&#39;</span>
+<span class="go">&#39;RDD1&#39;</span>
 </pre></div>
 </div>
 </dd></dl>
@@ -2135,7 +2135,7 @@ RDD’s key and value types. The mechanism is as follows:</p>
 
 <dl class="method">
 <dt id="pyspark.RDD.sortByKey">
-<code class="descname">sortByKey</code><span class="sig-paren">(</span><em>ascending=True</em>, <em>numPartitions=None</em>, <em>keyfunc=&lt;function &lt;lambda&gt;&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/rdd.html#RDD.sortByKey"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.RDD.sortByKey" title="Permalink to this definition">¶</a></dt>
+<code class="descname">sortByKey</code><span class="sig-paren">(</span><em>ascending=True</em>, <em>numPartitions=None</em>, <em>keyfunc=&lt;function RDD.&lt;lambda&gt;&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/rdd.html#RDD.sortByKey"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.RDD.sortByKey" title="Permalink to this definition">¶</a></dt>
 <dd><p>Sorts this RDD, which is assumed to consist of (key, value) pairs.</p>
 <div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">tmp</span> <span class="o">=</span> <span class="p">[(</span><span class="s1">&#39;a&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;b&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;1&#39;</span><span class="p">,</span> <span class="mi">3</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;d&#39;</span><span class="p">,</span> <span class="mi">4</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;2&#39;</span><span class="p">,</span> <span class="mi">5</span><span class="p">)]</span>
 <span class="gp">&gt;&gt;&gt; </span><span class="n">sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">tmp</span><span class="p">)</span><span class="o">.</span><span class="n">sortByKey</span><span class="p">()</span><span class="o">.</span><span class="n">first</span><span class="p">()</span>
@@ -2673,7 +2673,7 @@ not be as fast as more specialized serializers.</p>
 
 <dl class="method">
 <dt id="pyspark.PickleSerializer.loads">
-<code class="descname">loads</code><span class="sig-paren">(</span><em>obj</em>, <em>encoding=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/serializers.html#PickleSerializer.loads"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.PickleSerializer.loads" title="Permalink to this definition">¶</a></dt>
+<code class="descname">loads</code><span class="sig-paren">(</span><em>obj</em>, <em>encoding='bytes'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pyspark/serializers.html#PickleSerializer.loads"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pyspark.PickleSerializer.loads" title="Permalink to this definition">¶</a></dt>
 <dd></dd></dl>
 
 </dd></dl>
@@ -2930,7 +2930,7 @@ TaskAttemptID.</p>
           <a href="index.html" title="Welcome to Spark Python API Docs!"
              >previous</a> |</li>
     
-        <li class="nav-item nav-item-0"><a href="index.html">PySpark master documentation</a> &#187;</li>
+        <li class="nav-item nav-item-0"><a href="index.html">PySpark 2.3.1 documentation</a> &#187;</li>
  
       </ul>
     </div>

