Posted to reviews@spark.apache.org by hvanhovell <gi...@git.apache.org> on 2018/08/07 11:45:20 UTC

[GitHub] spark pull request #21258: [SPARK-23933][SQL] Add map_from_arrays function

Github user hvanhovell commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21258#discussion_r208199133
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala ---
    @@ -235,6 +235,69 @@ case class CreateMap(children: Seq[Expression]) extends Expression {
       override def prettyName: String = "map"
     }
     
    +/**
    + * Returns a Catalyst Map whose keys come from the first child array expression and whose values come from the second.
    + */
    +@ExpressionDescription(
    +  usage = """
    +    _FUNC_(keys, values) - Creates a map from the given pair of key/value arrays. All elements
    +      in keys must be non-null.""",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(1.0, 3.0), array('2', '4'));
    +       {1.0:"2",3.0:"4"}
    +  """, since = "2.4.0")
    +case class CreateMapFromArray(left: Expression, right: Expression)
    +    extends BinaryExpression with ExpectsInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = Seq(ArrayType, ArrayType)
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    (left.dataType, right.dataType) match {
    +      case (ArrayType(_, cn), ArrayType(_, _)) =>
    +        if (!cn) {
    +          TypeCheckResult.TypeCheckSuccess
    +        } else {
    +          TypeCheckResult.TypeCheckFailure("All of the given keys should be non-null")
    +        }
    +      case _ =>
    +        TypeCheckResult.TypeCheckFailure("The given two arguments should be an array")
    +    }
    +  }
    +
    +  override def dataType: DataType = {
    +    MapType(
    +      keyType = left.dataType.asInstanceOf[ArrayType].elementType,
    +      valueType = right.dataType.asInstanceOf[ArrayType].elementType,
    +      valueContainsNull = right.dataType.asInstanceOf[ArrayType].containsNull)
    +  }
    +
    +  override def nullable: Boolean = false
    +
    +  override def nullSafeEval(keyArray: Any, valueArray: Any): Any = {
    +    val keyArrayData = keyArray.asInstanceOf[ArrayData]
    --- End diff --
    
    I would like to err on the safe side here. `CreateMap` should be fixed IMO.
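
    For context, "erring on the safe side" here means re-checking the keys for nulls at
    runtime rather than trusting the static `containsNull` flag on the array type, which
    is only a schema-level hint and does not guarantee the data is actually null-free.
    Below is a minimal sketch of what such a defensive `nullSafeEval` might look like; the
    length check and the exact error messages are assumptions for illustration, not part
    of this patch, and it assumes `org.apache.spark.sql.catalyst.util.{ArrayBasedMapData,
    ArrayData}` are imported as elsewhere in this file:

        override def nullSafeEval(keyArray: Any, valueArray: Any): Any = {
          val keyArrayData = keyArray.asInstanceOf[ArrayData]
          val valueArrayData = valueArray.asInstanceOf[ArrayData]
          if (keyArrayData.numElements() != valueArrayData.numElements()) {
            throw new RuntimeException(
              "The key array and value array should have the same length")
          }
          // containsNull is a static property of the schema; re-check the actual data.
          var i = 0
          while (i < keyArrayData.numElements()) {
            if (keyArrayData.isNullAt(i)) {
              throw new RuntimeException("Cannot use null as map key!")
            }
            i += 1
          }
          new ArrayBasedMapData(keyArrayData.copy(), valueArrayData.copy())
        }

    With a check like this, `map_from_arrays(array(1, null), array('a', 'b'))` fails fast
    at runtime instead of silently producing a map with a null key.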


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org