Posted to commits@hive.apache.org by se...@apache.org on 2018/04/28 02:02:47 UTC

[1/4] hive git commit: HIVE-19324 : improve YARN queue check error message in Tez pool (Sergey Shelukhin, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/branch-3 7cbd64818 -> 0930154fd
  refs/heads/master cbc3863b3 -> e388bc770


HIVE-19324 : improve YARN queue check error message in Tez pool (Sergey Shelukhin, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9e7a10c2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9e7a10c2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9e7a10c2

Branch: refs/heads/master
Commit: 9e7a10c2abc72284d756dc5f01a60d9211c45d8d
Parents: cbc3863
Author: sergey <se...@apache.org>
Authored: Fri Apr 27 18:55:48 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Apr 27 18:55:48 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/tez/YarnQueueHelper.java  | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/9e7a10c2/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
index c9c859a..b762e68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
@@ -52,25 +52,31 @@ public class YarnQueueHelper {
     lastKnownGoodUrl = 0;
   }
 
-  public void checkQueueAccess(String queueName, String userName) throws IOException {
+  public void checkQueueAccess(
+      String queueName, String userName) throws IOException, HiveException {
     String urlSuffix = String.format(PERMISSION_PATH, queueName, userName);
     // TODO: if we ever use this endpoint for anything else, refactor cycling into a separate class.
     int urlIx = lastKnownGoodUrl, lastUrlIx = ((urlIx == 0) ? rmNodes.length : urlIx) - 1;
     Exception firstError = null;
     while (true) {
       String node = rmNodes[urlIx];
+      String error = null;
+      boolean isCallOk = false;
       try {
-        String error = checkQueueAccessFromSingleRm("http://" + node + urlSuffix);
-        lastKnownGoodUrl = urlIx;
-        if (error == null) return; // null error message here means the user has access.
-        throw new HiveException(error.isEmpty()
-            ? (userName + " has no access to " + queueName) : error);
+        error = checkQueueAccessFromSingleRm("http://" + node + urlSuffix);
+        isCallOk = true;
       } catch (Exception ex) {
         LOG.warn("Cannot check queue access against RM " + node, ex);
         if (firstError == null) {
           firstError = ex;
         }
       }
+      if (isCallOk) {
+        lastKnownGoodUrl = urlIx;
+        if (error == null) return; // null error message here means the user has access.
+        throw new HiveException(error.isEmpty()
+            ? (userName + " has no access to " + queueName) : error);
+      }
       if (urlIx == lastUrlIx) {
         throw new IOException("Cannot access any RM service; first error", firstError);
       }
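
The hunk above moves the evaluation of the RM response out of the try block, so a permission denial (a non-null error string) is no longer swallowed by the handler meant for unreachable RMs, which would otherwise retry the next node and eventually bury the real message. Below is a minimal, self-contained sketch of that retry-versus-deny pattern; RmCycleSketch, probeRm and the RMS array are illustrative stand-ins under assumption, not the actual Hive helper or the YARN REST endpoint:

import java.io.IOException;

public class RmCycleSketch {
  // Hypothetical RM addresses; the real helper reads them from configuration.
  private static final String[] RMS = {"rm1:8088", "rm2:8088"};

  // Stand-in for the REST call: returns null when access is granted,
  // a message when it is denied, and throws when the RM is unreachable.
  private static String probeRm(String rm) throws IOException {
    return null; // placeholder only
  }

  public static void check(String queue, String user) throws IOException {
    Exception firstError = null;
    for (String rm : RMS) {
      String denial = null;
      boolean callOk = false;
      try {
        denial = probeRm(rm);
        callOk = true;
      } catch (Exception ex) {
        if (firstError == null) firstError = ex; // remember the first transport failure
      }
      // Evaluated outside the try block, so a denial is not mistaken for an
      // RM outage and retried against the next node.
      if (callOk) {
        if (denial == null) return; // access granted
        throw new IllegalStateException(
            denial.isEmpty() ? user + " has no access to " + queue : denial);
      }
    }
    throw new IOException("Cannot access any RM service; first error", firstError);
  }
}

The point of the restructuring is that only transport failures advance the cycle; a definite answer from any reachable RM, whether grant or deny, ends the loop immediately.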


[3/4] hive git commit: HIVE-19338 : isExplicitAnalyze method may be incorrect in BasicStatsTask (Sergey Shelukhin, reviewed by Jesus Camacho Rodriguez)

Posted by se...@apache.org.
HIVE-19338 : isExplicitAnalyze method may be incorrect in BasicStatsTask (Sergey Shelukhin, reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e388bc77
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e388bc77
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e388bc77

Branch: refs/heads/master
Commit: e388bc770c237c0c197c81f534b0a8eb5e621dc7
Parents: 9e7a10c
Author: sergey <se...@apache.org>
Authored: Fri Apr 27 18:57:37 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Apr 27 18:57:37 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java  | 1 +
 .../org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java  | 1 +
 ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java | 1 +
 .../hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java  | 1 +
 .../java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java   | 7 ++++++-
 5 files changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e388bc77/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 06b7c93..64f9c70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -112,6 +112,7 @@ public class GenMRTableScan1 implements NodeProcessor {
             // The MR task is just a simple TableScanOperator
 
             BasicStatsWork statsWork = new BasicStatsWork(table.getTableSpec());
+            statsWork.setIsExplicitAnalyze(true);
 
             statsWork.setNoScanAnalyzeCommand(noScan);
             StatsWork columnStatsWork = new StatsWork(table, statsWork, parseCtx.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/e388bc77/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 5961059..03cceac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -115,6 +115,7 @@ public class ProcessAnalyzeTable implements NodeProcessor {
         // The Tez task is just a simple TableScanOperator
 
         BasicStatsWork basicStatsWork = new BasicStatsWork(table.getTableSpec());
+        basicStatsWork.setIsExplicitAnalyze(true);
         basicStatsWork.setNoScanAnalyzeCommand(parseContext.getQueryProperties().isNoScanAnalyzeCommand());
         StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, parseContext.getConf());
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/e388bc77/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index c268ddc..df1d9cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -392,6 +392,7 @@ public abstract class TaskCompiler {
       return TaskFactory.get(columnStatsWork);
     } else {
       BasicStatsWork statsWork = new BasicStatsWork(tableScan.getConf().getTableMetadata().getTableSpec());
+      statsWork.setIsExplicitAnalyze(true);
       StatsWork columnStatsWork = new StatsWork(table, statsWork, parseContext.getConf());
       columnStatsWork.collectStatsFromAggregator(tableScan.getConf());
       columnStatsWork.setSourceTask(currentTask);

http://git-wip-us.apache.org/repos/asf/hive/blob/e388bc77/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index e81d6f3..28d4de7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -123,6 +123,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
         // The Spark task is just a simple TableScanOperator
 
         BasicStatsWork basicStatsWork = new BasicStatsWork(table.getTableSpec());
+        basicStatsWork.setIsExplicitAnalyze(true);
         basicStatsWork.setNoScanAnalyzeCommand(parseContext.getQueryProperties().isNoScanAnalyzeCommand());
         StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, parseContext.getConf());
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/e388bc77/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
index 55d05a1..40def60 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
 public class BasicStatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private boolean isExplicitAnalyze;
   private TableSpec tableSpecs;         // source table spec -- for TableScanOperator
   private LoadTableDesc loadTableDesc;  // same as MoveWork.loadTableDesc -- for FileSinkOperator
   private LoadFileDesc loadFileDesc;    // same as MoveWork.loadFileDesc -- for FileSinkOperator
@@ -164,7 +165,7 @@ public class BasicStatsWork implements Serializable {
 
   public boolean isExplicitAnalyze() {
     // ANALYZE TABLE
-    return (getTableSpecs() != null);
+    return isExplicitAnalyze;
   }
   public boolean isTargetRewritten() {
     // ANALYZE TABLE
@@ -202,4 +203,8 @@ public class BasicStatsWork implements Serializable {
     }
   }
 
+  public void setIsExplicitAnalyze(boolean b) {
+    this.isExplicitAnalyze = b;
+  }
+
 }
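
For readers skimming the diff: the change replaces an inferred check (a table spec being present was taken to mean ANALYZE TABLE) with a flag that the MR, Tez and Spark compilers set only on the explicit ANALYZE path, presumably because a table spec can also be populated on other code paths. A hedged, stand-alone illustration of the new contract follows; BasicStatsWorkSketch is a stand-in, not the real org.apache.hadoop.hive.ql.plan.BasicStatsWork:

public class BasicStatsWorkSketch {
  private boolean isExplicitAnalyze;

  // Set by the compilers in the diffs above, only on the ANALYZE TABLE path;
  // previously the getter inferred this from getTableSpecs() != null.
  public void setIsExplicitAnalyze(boolean b) {
    this.isExplicitAnalyze = b;
  }

  public boolean isExplicitAnalyze() {
    return isExplicitAnalyze;
  }

  public static void main(String[] args) {
    BasicStatsWorkSketch work = new BasicStatsWorkSketch();
    work.setIsExplicitAnalyze(true); // what GenMRTableScan1 / ProcessAnalyzeTable now do
    System.out.println(work.isExplicitAnalyze()); // prints true
  }
}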


[4/4] hive git commit: HIVE-19338 : isExplicitAnalyze method may be incorrect in BasicStatsTask (Sergey Shelukhin, reviewed by Jesus Camacho Rodriguez)

Posted by se...@apache.org.
HIVE-19338 : isExplicitAnalyze method may be incorrect in BasicStatsTask (Sergey Shelukhin, reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0930154f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0930154f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0930154f

Branch: refs/heads/branch-3
Commit: 0930154fdf4ffc35de5bc2fe56cd03a1f48f7598
Parents: 596c811
Author: sergey <se...@apache.org>
Authored: Fri Apr 27 18:57:37 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Apr 27 18:57:50 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java  | 1 +
 .../org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java  | 1 +
 ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java | 1 +
 .../hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java  | 1 +
 .../java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java   | 7 ++++++-
 5 files changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0930154f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 06b7c93..64f9c70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -112,6 +112,7 @@ public class GenMRTableScan1 implements NodeProcessor {
             // The MR task is just a simple TableScanOperator
 
             BasicStatsWork statsWork = new BasicStatsWork(table.getTableSpec());
+            statsWork.setIsExplicitAnalyze(true);
 
             statsWork.setNoScanAnalyzeCommand(noScan);
             StatsWork columnStatsWork = new StatsWork(table, statsWork, parseCtx.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/0930154f/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 5961059..03cceac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -115,6 +115,7 @@ public class ProcessAnalyzeTable implements NodeProcessor {
         // The Tez task is just a simple TableScanOperator
 
         BasicStatsWork basicStatsWork = new BasicStatsWork(table.getTableSpec());
+        basicStatsWork.setIsExplicitAnalyze(true);
         basicStatsWork.setNoScanAnalyzeCommand(parseContext.getQueryProperties().isNoScanAnalyzeCommand());
         StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, parseContext.getConf());
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/0930154f/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index c268ddc..df1d9cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -392,6 +392,7 @@ public abstract class TaskCompiler {
       return TaskFactory.get(columnStatsWork);
     } else {
       BasicStatsWork statsWork = new BasicStatsWork(tableScan.getConf().getTableMetadata().getTableSpec());
+      statsWork.setIsExplicitAnalyze(true);
       StatsWork columnStatsWork = new StatsWork(table, statsWork, parseContext.getConf());
       columnStatsWork.collectStatsFromAggregator(tableScan.getConf());
       columnStatsWork.setSourceTask(currentTask);

http://git-wip-us.apache.org/repos/asf/hive/blob/0930154f/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index e81d6f3..28d4de7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -123,6 +123,7 @@ public class SparkProcessAnalyzeTable implements NodeProcessor {
         // The Spark task is just a simple TableScanOperator
 
         BasicStatsWork basicStatsWork = new BasicStatsWork(table.getTableSpec());
+        basicStatsWork.setIsExplicitAnalyze(true);
         basicStatsWork.setNoScanAnalyzeCommand(parseContext.getQueryProperties().isNoScanAnalyzeCommand());
         StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, parseContext.getConf());
         columnStatsWork.collectStatsFromAggregator(tableScan.getConf());

http://git-wip-us.apache.org/repos/asf/hive/blob/0930154f/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
index 55d05a1..40def60 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BasicStatsWork.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
 public class BasicStatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private boolean isExplicitAnalyze;
   private TableSpec tableSpecs;         // source table spec -- for TableScanOperator
   private LoadTableDesc loadTableDesc;  // same as MoveWork.loadTableDesc -- for FileSinkOperator
   private LoadFileDesc loadFileDesc;    // same as MoveWork.loadFileDesc -- for FileSinkOperator
@@ -164,7 +165,7 @@ public class BasicStatsWork implements Serializable {
 
   public boolean isExplicitAnalyze() {
     // ANALYZE TABLE
-    return (getTableSpecs() != null);
+    return isExplicitAnalyze;
   }
   public boolean isTargetRewritten() {
     // ANALYZE TABLE
@@ -202,4 +203,8 @@ public class BasicStatsWork implements Serializable {
     }
   }
 
+  public void setIsExplicitAnalyze(boolean b) {
+    this.isExplicitAnalyze = b;
+  }
+
 }


[2/4] hive git commit: HIVE-19324 : improve YARN queue check error message in Tez pool (Sergey Shelukhin, reviewed by Ashutosh Chauhan)

Posted by se...@apache.org.
HIVE-19324 : improve YARN queue check error message in Tez pool (Sergey Shelukhin, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/596c8112
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/596c8112
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/596c8112

Branch: refs/heads/branch-3
Commit: 596c811253b4a1818fb11b6edafcf1f7e81b8949
Parents: 7cbd648
Author: sergey <se...@apache.org>
Authored: Fri Apr 27 18:55:48 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Apr 27 18:56:04 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/tez/YarnQueueHelper.java  | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/596c8112/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
index c9c859a..b762e68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/YarnQueueHelper.java
@@ -52,25 +52,31 @@ public class YarnQueueHelper {
     lastKnownGoodUrl = 0;
   }
 
-  public void checkQueueAccess(String queueName, String userName) throws IOException {
+  public void checkQueueAccess(
+      String queueName, String userName) throws IOException, HiveException {
     String urlSuffix = String.format(PERMISSION_PATH, queueName, userName);
     // TODO: if we ever use this endpoint for anything else, refactor cycling into a separate class.
     int urlIx = lastKnownGoodUrl, lastUrlIx = ((urlIx == 0) ? rmNodes.length : urlIx) - 1;
     Exception firstError = null;
     while (true) {
       String node = rmNodes[urlIx];
+      String error = null;
+      boolean isCallOk = false;
       try {
-        String error = checkQueueAccessFromSingleRm("http://" + node + urlSuffix);
-        lastKnownGoodUrl = urlIx;
-        if (error == null) return; // null error message here means the user has access.
-        throw new HiveException(error.isEmpty()
-            ? (userName + " has no access to " + queueName) : error);
+        error = checkQueueAccessFromSingleRm("http://" + node + urlSuffix);
+        isCallOk = true;
       } catch (Exception ex) {
         LOG.warn("Cannot check queue access against RM " + node, ex);
         if (firstError == null) {
           firstError = ex;
         }
       }
+      if (isCallOk) {
+        lastKnownGoodUrl = urlIx;
+        if (error == null) return; // null error message here means the user has access.
+        throw new HiveException(error.isEmpty()
+            ? (userName + " has no access to " + queueName) : error);
+      }
       if (urlIx == lastUrlIx) {
         throw new IOException("Cannot access any RM service; first error", firstError);
       }