You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by rdblue <gi...@git.apache.org> on 2018/08/02 16:08:06 UTC

[GitHub] spark pull request #21911: [SPARK-24940][SQL] Coalesce and Repartition Hint ...

Github user rdblue commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21911#discussion_r207285266
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveHints.scala ---
    @@ -102,6 +104,35 @@ object ResolveHints {
         }
       }
     
    +  /**
    +   * COALESCE Hint accepts the names "COALESCE" and "REPARTITION".
    +   * Its parameter is the target partition number.
    +   */
    +  class ResolveCoalesceHints(conf: SQLConf) extends Rule[LogicalPlan] {
    +    private val COALESCE_HINT_NAMES = Set("COALESCE", "REPARTITION")
    +
    +    private def applyCoalesceHint(
    +      plan: LogicalPlan,
    +      numPartitions: Int,
    +      shuffle: Boolean): LogicalPlan = {
    +      Repartition(numPartitions, shuffle, plan)
    +    }
    +
    +    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    +      case h: UnresolvedHint if COALESCE_HINT_NAMES.contains(h.name.toUpperCase(Locale.ROOT)) =>
    +        h.parameters match {
    +          case Seq(Literal(numPartitions: Int, IntegerType)) =>
    +            val shuffle = h.name.toUpperCase(Locale.ROOT) match {
    +              case "REPARTITION" => true
    +              case "COALESCE" => false
    +            }
    +            applyCoalesceHint(h.child, numPartitions, shuffle)
    +          case _ =>
    +            throw new AnalysisException("COALESCE Hint expects a partition number as parameter")
    --- End diff --
    
Can you use `h.name.toUpperCase` in this error message instead? I think that would be a better message for users who don't know the relationship between COALESCE and REPARTITION.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org