You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2018/03/26 03:42:38 UTC

spark git commit: [SPARK-23700][PYTHON] Cleanup imports in pyspark.sql

Repository: spark
Updated Branches:
  refs/heads/master e4bec7cb8 -> a9350d709


[SPARK-23700][PYTHON] Cleanup imports in pyspark.sql

## What changes were proposed in this pull request?

This cleans up unused imports, mainly from the pyspark.sql module. Added a note in functions.py that imports `UserDefinedFunction` only to maintain backwards compatibility for using `from pyspark.sql.functions import UserDefinedFunction`.

## How was this patch tested?

Existing tests and built docs.

Author: Bryan Cutler <cu...@gmail.com>

Closes #20892 from BryanCutler/pyspark-cleanup-imports-SPARK-23700.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a9350d70
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a9350d70
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a9350d70

Branch: refs/heads/master
Commit: a9350d7095b79c8374fb4a06fd3f1a1a67615f6f
Parents: e4bec7c
Author: Bryan Cutler <cu...@gmail.com>
Authored: Mon Mar 26 12:42:32 2018 +0900
Committer: hyukjinkwon <gu...@apache.org>
Committed: Mon Mar 26 12:42:32 2018 +0900

----------------------------------------------------------------------
 python/pyspark/sql/column.py     | 1 -
 python/pyspark/sql/conf.py       | 1 -
 python/pyspark/sql/functions.py  | 3 +--
 python/pyspark/sql/group.py      | 3 +--
 python/pyspark/sql/readwriter.py | 2 +-
 python/pyspark/sql/streaming.py  | 2 --
 python/pyspark/sql/types.py      | 1 -
 python/pyspark/sql/udf.py        | 6 ++----
 python/pyspark/util.py           | 2 --
 9 files changed, 5 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/column.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/column.py b/python/pyspark/sql/column.py
index e05a7b3..922c7cf 100644
--- a/python/pyspark/sql/column.py
+++ b/python/pyspark/sql/column.py
@@ -16,7 +16,6 @@
 #
 
 import sys
-import warnings
 import json
 
 if sys.version >= '3':

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/conf.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/conf.py b/python/pyspark/sql/conf.py
index b82224b..db49040 100644
--- a/python/pyspark/sql/conf.py
+++ b/python/pyspark/sql/conf.py
@@ -67,7 +67,6 @@ class RuntimeConfig(object):
 def _test():
     import os
     import doctest
-    from pyspark.context import SparkContext
     from pyspark.sql.session import SparkSession
     import pyspark.sql.conf
 

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/functions.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index dff5909..a4edb1e 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -18,7 +18,6 @@
 """
 A collections of builtin functions
 """
-import math
 import sys
 import functools
 import warnings
@@ -28,10 +27,10 @@ if sys.version < "3":
 
 from pyspark import since, SparkContext
 from pyspark.rdd import ignore_unicode_prefix, PythonEvalType
-from pyspark.serializers import PickleSerializer, AutoBatchedSerializer
 from pyspark.sql.column import Column, _to_java_column, _to_seq
 from pyspark.sql.dataframe import DataFrame
 from pyspark.sql.types import StringType, DataType
+# Keep UserDefinedFunction import for backwards compatible import; moved in SPARK-22409
 from pyspark.sql.udf import UserDefinedFunction, _create_udf
 
 

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/group.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/group.py b/python/pyspark/sql/group.py
index 35cac40..3505065 100644
--- a/python/pyspark/sql/group.py
+++ b/python/pyspark/sql/group.py
@@ -19,9 +19,8 @@ import sys
 
 from pyspark import since
 from pyspark.rdd import ignore_unicode_prefix, PythonEvalType
-from pyspark.sql.column import Column, _to_seq, _to_java_column, _create_column_from_literal
+from pyspark.sql.column import Column, _to_seq
 from pyspark.sql.dataframe import DataFrame
-from pyspark.sql.udf import UserDefinedFunction
 from pyspark.sql.types import *
 
 __all__ = ["GroupedData"]

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/readwriter.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index e528863..4f9b938 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -22,7 +22,7 @@ if sys.version >= '3':
 
 from py4j.java_gateway import JavaClass
 
-from pyspark import RDD, since, keyword_only
+from pyspark import RDD, since
 from pyspark.rdd import ignore_unicode_prefix
 from pyspark.sql.column import _to_seq
 from pyspark.sql.types import *

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/streaming.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/streaming.py b/python/pyspark/sql/streaming.py
index 07f9ac1..c7907aa 100644
--- a/python/pyspark/sql/streaming.py
+++ b/python/pyspark/sql/streaming.py
@@ -24,8 +24,6 @@ if sys.version >= '3':
 else:
     intlike = (int, long)
 
-from abc import ABCMeta, abstractmethod
-
 from pyspark import since, keyword_only
 from pyspark.rdd import ignore_unicode_prefix
 from pyspark.sql.column import _to_seq

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/types.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index 5d5919e..1f65348 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -35,7 +35,6 @@ from py4j.java_gateway import JavaClass
 
 from pyspark import SparkContext
 from pyspark.serializers import CloudPickleSerializer
-from pyspark.util import _exception_message
 
 __all__ = [
     "DataType", "NullType", "StringType", "BinaryType", "BooleanType", "DateType",

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/sql/udf.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/udf.py b/python/pyspark/sql/udf.py
index 24dd06c..9dbe49b 100644
--- a/python/pyspark/sql/udf.py
+++ b/python/pyspark/sql/udf.py
@@ -17,16 +17,14 @@
 """
 User-defined function related classes and functions
 """
-import sys
-import inspect
 import functools
 import sys
 
 from pyspark import SparkContext, since
 from pyspark.rdd import _prepare_for_python_RDD, PythonEvalType, ignore_unicode_prefix
 from pyspark.sql.column import Column, _to_java_column, _to_seq
-from pyspark.sql.types import StringType, DataType, ArrayType, StructType, MapType, \
-    _parse_datatype_string, to_arrow_type, to_arrow_schema
+from pyspark.sql.types import StringType, DataType, StructType, _parse_datatype_string,\
+    to_arrow_type, to_arrow_schema
 from pyspark.util import _get_argspec
 
 __all__ = ["UDFRegistration"]

http://git-wip-us.apache.org/repos/asf/spark/blob/a9350d70/python/pyspark/util.py
----------------------------------------------------------------------
diff --git a/python/pyspark/util.py b/python/pyspark/util.py
index ed1bdd0..49afc13 100644
--- a/python/pyspark/util.py
+++ b/python/pyspark/util.py
@@ -22,8 +22,6 @@ from py4j.protocol import Py4JJavaError
 
 __all__ = []
 
-import sys
-
 
 def _exception_message(excp):
     """Return the message from an exception as either a str or unicode object.  Supports both


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org