Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2022/10/07 05:19:13 UTC

[GitHub] [spark] sadikovi commented on a diff in pull request #38113: [SPARK-40667][SQL] Refactor File Data Source Options

sadikovi commented on code in PR #38113:
URL: https://github.com/apache/spark/pull/38113#discussion_r989688150


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala:
##########
@@ -230,3 +232,35 @@ private[sql] object JSONOptionsInRead {
     Charset.forName("UTF-32")
   )
 }
+
+object JSONOptions extends DataSourceOptions {
+  val SAMPLING_RATIO = newOption("samplingRatio")
+  val PRIMITIVES_AS_STRING = newOption("primitivesAsString")
+  val PREFERS_DECIMAL = newOption("prefersDecimal")
+  val ALLOW_COMMENTS = newOption("allowComments")
+  val ALLOW_UNQUOTED_FIELD_NAMES = newOption("allowUnquotedFieldNames")
+  val ALLOW_SINGLE_QUOTES = newOption("allowSingleQuotes")
+  val ALLOW_NUMERIC_LEADING_ZEROS = newOption("allowNumericLeadingZeros")
+  val ALLOW_NON_NUMERIC_NUMBERS = newOption("allowNonNumericNumbers")
+  val ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER = newOption("allowBackslashEscapingAnyCharacter")
+  val ALLOW_UNQUOTED_CONTROL_CHARS = newOption("allowUnquotedControlChars")
+  val COMPRESSION = newOption("compression")
+  val MODE = newOption("mode")
+  val DROP_FIELD_IF_ALL_NULL = newOption("dropFieldIfAllNull")
+  val IGNORE_NULL_FIELDS = newOption("ignoreNullFields")
+  val LOCALE = newOption("locale")
+  val DATE_FORMAT = newOption("dateFormat")
+  val TIMESTAMP_FORMAT = newOption("timestampFormat")
+  val TIMESTAMP_NTZ_FORMAT = newOption("timestampNTZFormat")
+  val ENABLE_DATETIME_PARSING_FALLBACK = newOption("enableDateTimeParsingFallback")
+  val MULTILINE = newOption("multiLine")
+  val LINE_SEP = newOption("lineSep")
+  val PRETTY = newOption("pretty")
+  val INFER_TIMESTAMP = newOption("inferTimestamp")
+  val COLUMN_NAME_OF_CORRUPTED_RECORD = newOption("columnNameOfCorruptRecord")
+  val TIME_ZONE = newOption("timeZone")
+  val WRITE_NON_ASCII_CHARACTER_AS_CODEPOINT = newOption("writeNonAsciiCharacterAsCodePoint")
+  // Options with alternative
+  val ENCODING = newOption("encoding", Some("charset"))
+  val CHARSET = newOption("charset", Some("encoding"))

Review Comment:
   I think there should be only one option with an alternative here, shouldn't there?
   ```scala
   val ENCODING = newOption("encoding", Some("charset"))
   ```
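   For reference, here is a rough sketch of how a single declaration with an alternative could make both names valid, so a separate `CHARSET` entry becomes redundant. This is a hypothetical `DataSourceOptions` implementation for illustration only; the actual trait in this PR may differ:
   ```scala
   import scala.collection.mutable

   trait DataSourceOptions {
     // All option names (including alternatives) that this source accepts.
     private val validOptions = mutable.Set.empty[String]
     // Bidirectional mapping between an option and its alternative name.
     private val alternativeMap = mutable.Map.empty[String, String]

     def newOption(name: String): String = {
       validOptions += name
       name
     }

     // Registering "encoding" with Some("charset") makes both names valid
     // and records them as alternatives of each other.
     def newOption(name: String, alternative: Option[String]): String = {
       validOptions += name
       alternative.foreach { alt =>
         validOptions += alt
         alternativeMap ++= Seq(name -> alt, alt -> name)
       }
       name
     }

     def isValidOptionName(name: String): Boolean = validOptions.contains(name)
     def getAllValidOptionNames: Set[String] = validOptions.toSet
     def getAlternativeOptionName(name: String): Option[String] = alternativeMap.get(name)
   }
   ```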



##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala:
##########
@@ -3080,6 +3081,23 @@ abstract class CSVSuite
       }
     }
   }
+
+  test("SPARK-40667: check the number of valid CSV option names") {
+    assert(CSVOptions.getAllValidOptionNames.size == 38)
+  }
+
+  test("SPARK-40667: validate a given option name") {
+    assert(CSVOptions.isValidOptionName("inferSchema"))
+    assert(CSVOptions.isValidOptionName("prefersDate"))
+    assert(!CSVOptions.isValidOptionName("inferSchemas"))
+    assert(!CSVOptions.isValidOptionName("randomName"))

Review Comment:
   Let's add these two assertions to the test below, separated by a new line from the `getAlternativeOptionName` tests. Thanks.
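   A rough sketch of what the merged test could look like (hypothetical; the exact test name and the existing `getAlternativeOptionName` assertions may differ):
   ```scala
   test("SPARK-40667: validate a given option name") {
     assert(CSVOptions.isValidOptionName("inferSchema"))
     assert(CSVOptions.isValidOptionName("prefersDate"))
     assert(!CSVOptions.isValidOptionName("inferSchemas"))
     assert(!CSVOptions.isValidOptionName("randomName"))

     // ... existing getAlternativeOptionName assertions go here ...
   }
   ```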



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVOptions.scala:
##########
@@ -203,17 +205,17 @@ class CSVOptions(
   // Otherwise, depending on the parser policy and a custom pattern, an exception may be thrown and
   // the value will be parsed as null.
   val enableDateTimeParsingFallback: Option[Boolean] =
-    parameters.get("enableDateTimeParsingFallback").map(_.toBoolean)

Review Comment:
   +1
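   For context, the refactored lookup presumably goes through the new constant instead of the string literal; a sketch of the resulting line (assuming `ENABLE_DATETIME_PARSING_FALLBACK` comes from the `CSVOptions` companion object):
   ```scala
   // Sketch: string literal replaced by the registered option constant.
   val enableDateTimeParsingFallback: Option[Boolean] =
     parameters.get(ENABLE_DATETIME_PARSING_FALLBACK).map(_.toBoolean)
   ```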



##########
connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala:
##########
@@ -104,7 +106,7 @@ private[sql] class AvroOptions(
       ignoreFilesWithoutExtensionByDefault)
 
     parameters
-      .get(AvroOptions.ignoreExtensionKey)
+      .get(AvroOptions.IGNORE_EXTENSION)

Review Comment:
   Let's just keep `IGNORE_EXTENSION`; we already import `AvroOptions._`.
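   A sketch of the simplified access with the existing wildcard import in scope (assuming the rest of the call chain is unchanged):
   ```scala
   // Sketch: unqualified access, relying on the existing `import AvroOptions._`.
   parameters
     .get(IGNORE_EXTENSION)
     .map(_.toBoolean)
   ```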



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala:
##########
@@ -230,3 +232,35 @@ private[sql] object JSONOptionsInRead {
     Charset.forName("UTF-32")
   )
 }
+
+object JSONOptions extends DataSourceOptions {
+  val SAMPLING_RATIO = newOption("samplingRatio")
+  val PRIMITIVES_AS_STRING = newOption("primitivesAsString")
+  val PREFERS_DECIMAL = newOption("prefersDecimal")
+  val ALLOW_COMMENTS = newOption("allowComments")
+  val ALLOW_UNQUOTED_FIELD_NAMES = newOption("allowUnquotedFieldNames")
+  val ALLOW_SINGLE_QUOTES = newOption("allowSingleQuotes")
+  val ALLOW_NUMERIC_LEADING_ZEROS = newOption("allowNumericLeadingZeros")
+  val ALLOW_NON_NUMERIC_NUMBERS = newOption("allowNonNumericNumbers")
+  val ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER = newOption("allowBackslashEscapingAnyCharacter")
+  val ALLOW_UNQUOTED_CONTROL_CHARS = newOption("allowUnquotedControlChars")
+  val COMPRESSION = newOption("compression")
+  val MODE = newOption("mode")
+  val DROP_FIELD_IF_ALL_NULL = newOption("dropFieldIfAllNull")
+  val IGNORE_NULL_FIELDS = newOption("ignoreNullFields")
+  val LOCALE = newOption("locale")
+  val DATE_FORMAT = newOption("dateFormat")
+  val TIMESTAMP_FORMAT = newOption("timestampFormat")
+  val TIMESTAMP_NTZ_FORMAT = newOption("timestampNTZFormat")
+  val ENABLE_DATETIME_PARSING_FALLBACK = newOption("enableDateTimeParsingFallback")
+  val MULTILINE = newOption("multiLine")
+  val LINE_SEP = newOption("lineSep")

Review Comment:
   Maybe we should keep the changes as is; I am inclined to keep them separate for now.



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetOptions.scala:
##########
@@ -47,9 +47,9 @@ class ParquetOptions(
     // `compression`, `parquet.compression`(i.e., ParquetOutputFormat.COMPRESSION), and
     // `spark.sql.parquet.compression.codec`
     // are in order of precedence from highest to lowest.
-    val parquetCompressionConf = parameters.get(ParquetOutputFormat.COMPRESSION)
+    val parquetCompressionConf = parameters.get(PARQUET_COMPRESS)

Review Comment:
   Let's call it `PARQUET_COMPRESSION`, similar to `ORC_COMPRESSION`.
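   A sketch of the suggested naming (assuming the constant is still registered against `ParquetOutputFormat.COMPRESSION`, i.e. `parquet.compression`):
   ```scala
   // Mirrors ORC_COMPRESSION: register the Hadoop key under a constant name.
   val PARQUET_COMPRESSION = newOption(ParquetOutputFormat.COMPRESSION)

   // The lookup then becomes:
   val parquetCompressionConf = parameters.get(PARQUET_COMPRESSION)
   ```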



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVOptions.scala:
##########
@@ -327,3 +329,45 @@ class CSVOptions(
     settings
   }
 }
+
+object CSVOptions extends DataSourceOptions {
+  val HEADER = newOption("header")
+  val INFER_SCHEMA = newOption("inferSchema")
+  val IGNORE_LEADING_WHITESPACE = newOption("ignoreLeadingWhiteSpace")
+  val IGNORE_TRAILING_WHITESPACE = newOption("ignoreTrailingWhiteSpace")
+  val PREFERS_DATE = newOption("prefersDate")
+  val ESCAPE_QUOTES = newOption("escapeQuotes")
+  val QUOTE_ALL = newOption("quoteAll")
+  val ENFORCE_SCHEMA = newOption("enforceSchema")
+  val QUOTE = newOption("quote")
+  val ESCAPE = newOption("escape")
+  val COMMENT = newOption("comment")
+  val MAX_COLUMNS = newOption("maxColumns")
+  val MAX_CHARS_PER_COLUMN = newOption("maxCharsPerColumn")
+  val MODE = newOption("mode")
+  val CHAR_TO_ESCAPE_QUOTE_ESCAPING = newOption("charToEscapeQuoteEscaping")
+  val LOCALE = newOption("locale")
+  val DATE_FORMAT = newOption("dateFormat")
+  val TIMESTAMP_FORMAT = newOption("timestampFormat")
+  val TIMESTAMP_NTZ_FORMAT = newOption("timestampNTZFormat")
+  val ENABLE_DATETIME_PARSING_FALLBACK = newOption("enableDateTimeParsingFallback")
+  val MULTI_LINE = newOption("multiLine")
+  val SAMPLING_RATIO = newOption("samplingRatio")
+  val EMPTY_VALUE = newOption("emptyValue")
+  val LINE_SEP = newOption("lineSep")
+  val INPUT_BUFFER_SIZE = newOption("inputBufferSize")
+  val COLUMN_NAME_OF_CORRUPT_RECORD = newOption("columnNameOfCorruptRecord")
+  val NULL_VALUE = newOption("nullValue")
+  val NAN_VALUE = newOption("nanValue")
+  val POSITIVE_INF = newOption("positiveInf")
+  val NEGATIVE_INF = newOption("negativeInf")
+  val TIME_ZONE = newOption("timeZone")
+  val UNESCAPED_QUOTE_HANDLING = newOption("unescapedQuoteHandling")
+  // Options with alternative
+  val CHARSET = newOption("charset")

Review Comment:
   This option does not seem to have an alternative in the code.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

