Posted to commits@hive.apache.org by vi...@apache.org on 2021/10/19 19:50:43 UTC

[hive] branch branch-2.3 updated: HIVE-25616: Hive-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar)

This is an automated email from the ASF dual-hosted git repository.

vihangk1 pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new 8e7f23f  HIVE-25616: Hive-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar)
8e7f23f is described below

commit 8e7f23f34b2ce7328c9d571a13c336f0c8cdecb6
Author: Neelesh Srinivas Salian <ns...@users.noreply.github.com>
AuthorDate: Tue Oct 19 12:50:31 2021 -0700

    HIVE-25616: Hive-24741 backport to 2.3 (Neelesh Srinivas Salian reviewed by Vihang Karajgaonkar)
    
    Closes (#2730)
---
 .../apache/hadoop/hive/metastore/ObjectStore.java  | 62 +++++++++++++++++-----
 .../apache/hadoop/hive/ql/metadata/TestHive.java   | 56 +++++++++++++++++++
 2 files changed, 104 insertions(+), 14 deletions(-)

diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index f7248b1..bb69d07 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -169,6 +169,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
@@ -2565,34 +2566,67 @@ public class ObjectStore implements RawStore, Configurable {
     return (Collection) query.execute(dbName, tableName, partNameMatcher);
   }
 
+  /**
+   * If all the values in partVals are empty strings, the request is effectively for
+   * all the partitions, so we can attempt to use the directSQL equivalent API, which
+   * is considerably faster.
+   * @param partVals The partition values used to filter the partitions.
+   * @return true only when partVals is non-empty and contains only empty strings,
+   * otherwise false. Callers must still skip directSQL when partition privileges are
+   * requested, since directSQL does not support partition privileges.
+   */
+  private boolean canTryDirectSQL(List<String> partVals) {
+    if (partVals.isEmpty()) {
+      return false;
+    }
+    for (String val : partVals) {
+      if (val != null && !val.isEmpty()) {
+        return false;
+      }
+    }
+    return true;
+  }
+
   @Override
   public List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name,
       List<String> part_vals, short max_parts, String userName, List<String> groupNames)
       throws MetaException, InvalidObjectException, NoSuchObjectException {
-    List<Partition> partitions = new ArrayList<Partition>();
+    List<Partition> partitions = new ArrayList<>();
     boolean success = false;
     QueryWrapper queryWrapper = new QueryWrapper();
-
     try {
       openTransaction();
-      LOG.debug("executing listPartitionNamesPsWithAuth");
-      Collection parts = getPartitionPsQueryResults(db_name, tbl_name,
-          part_vals, max_parts, null, queryWrapper);
+
       MTable mtbl = getMTable(db_name, tbl_name);
+      if (mtbl == null) {
+        throw new NoSuchObjectException(db_name +  "." + tbl_name + " table not found");
+      }
+      boolean getauth = null != userName && null != groupNames &&
+          "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"));
+      if(!getauth && canTryDirectSQL(part_vals)) {
+        LOG.debug(
+            "Redirecting to directSQL enabled API: db: {} tbl: {} partVals: {}",
+            db_name, tbl_name, Joiner.on(',').join(part_vals));
+        return getPartitions(db_name, tbl_name, -1);
+      }
+      LOG.debug("executing listPartitionNamesPsWithAuth");
+      Collection parts = getPartitionPsQueryResults(db_name, tbl_name, part_vals,
+          max_parts, null, queryWrapper);
       for (Object o : parts) {
         Partition part = convertToPart((MPartition) o);
-        //set auth privileges
-        if (null != userName && null != groupNames &&
-            "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
-          String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
-              .getPartitionKeys()), part.getValues());
-          PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name,
-              tbl_name, partName, userName, groupNames);
-          part.setPrivileges(partAuth);
-        }
+        // set auth privileges
+        String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
+            .getPartitionKeys()), part.getValues());
+        PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name,
+            tbl_name, partName, userName, groupNames);
+        part.setPrivileges(partAuth);
         partitions.add(part);
       }
       success = commitTransaction();
+    } catch (InvalidObjectException | NoSuchObjectException | MetaException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new MetaException(e.getMessage());
     } finally {
       rollbackAndCleanup(success, queryWrapper);
     }
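
In short, the ObjectStore change above makes listPartitionsPsWithAuth fall back to the much faster directSQL-backed getPartitions(db_name, tbl_name, -1) call whenever no partition-level authorization is requested and every supplied partition value is empty. The standalone Java sketch below only illustrates that decision logic and is not the actual ObjectStore code: canTryDirectSQL mirrors the helper added in the patch, while DirectSqlRedirectSketch, choosePath, and authRequested are illustrative names rather than Hive APIs.

import java.util.Arrays;
import java.util.List;

// Hedged sketch of the decision introduced by this patch: when may
// listPartitionsPsWithAuth redirect to the directSQL-backed getPartitions()?
public class DirectSqlRedirectSketch {

  // Mirrors the new ObjectStore#canTryDirectSQL: true only for a non-empty
  // list whose entries are all null or empty, i.e. "give me every partition".
  static boolean canTryDirectSQL(List<String> partVals) {
    if (partVals.isEmpty()) {
      return false;
    }
    for (String val : partVals) {
      if (val != null && !val.isEmpty()) {
        return false;
      }
    }
    return true;
  }

  // Illustrative stand-in for the branch added in listPartitionsPsWithAuth:
  // directSQL is only an option when partition-level auth is not requested.
  static String choosePath(boolean authRequested, List<String> partVals) {
    if (!authRequested && canTryDirectSQL(partVals)) {
      return "getPartitions(dbName, tblName, -1)";   // fast directSQL path
    }
    return "getPartitionPsQueryResults(...)";        // original JDO query path
  }

  public static void main(String[] args) {
    // All-empty values, no auth requested: the directSQL shortcut applies.
    System.out.println(choosePath(false, Arrays.asList("", "")));
    // A concrete value such as "2008-04-08" keeps the original JDO path.
    System.out.println(choosePath(false, Arrays.asList("2008-04-08", "")));
    // Auth requested: directSQL is skipped because it cannot attach privileges.
    System.out.println(choosePath(true, Arrays.asList("", "")));
  }
}

Note that the per-partition privilege lookup in the patch still runs only on the JDO path, which is why the authorization check is evaluated before attempting the directSQL shortcut.
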
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 91eb033..f5f711d 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -679,6 +679,62 @@ public class TestHive extends TestCase {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to create parition for table: " + tableName, false);
       }
+      part_spec.clear();
+      part_spec.put("ds", "2008-04-08");
+      part_spec.put("hr", "13");
+      try {
+        hm.createPartition(tbl, part_spec);
+      } catch (HiveException e) {
+        System.err.println(StringUtils.stringifyException(e));
+        assertTrue("Unable to create parition for table: " + tableName, false);
+      }
+      part_spec.clear();
+      part_spec.put("ds", "2008-04-08");
+      part_spec.put("hr", "14");
+      try {
+        hm.createPartition(tbl, part_spec);
+      } catch (HiveException e) {
+        System.err.println(StringUtils.stringifyException(e));
+        assertTrue("Unable to create parition for table: " + tableName, false);
+      }
+      part_spec.clear();
+      part_spec.put("ds", "2008-04-07");
+      part_spec.put("hr", "12");
+      try {
+        hm.createPartition(tbl, part_spec);
+      } catch (HiveException e) {
+        System.err.println(StringUtils.stringifyException(e));
+        assertTrue("Unable to create parition for table: " + tableName, false);
+      }
+      part_spec.clear();
+      part_spec.put("ds", "2008-04-07");
+      part_spec.put("hr", "13");
+      try {
+        hm.createPartition(tbl, part_spec);
+      } catch (HiveException e) {
+        System.err.println(StringUtils.stringifyException(e));
+        assertTrue("Unable to create parition for table: " + tableName, false);
+      }
+
+      Map<String, String> partialSpec = new HashMap<>();
+      partialSpec.put("ds", "2008-04-07");
+      assertEquals(2, hm.getPartitions(tbl, partialSpec).size());
+
+      partialSpec = new HashMap<>();
+      partialSpec.put("ds", "2008-04-08");
+      assertEquals(3, hm.getPartitions(tbl, partialSpec).size());
+
+      partialSpec = new HashMap<>();
+      partialSpec.put("hr", "13");
+      assertEquals(2, hm.getPartitions(tbl, partialSpec).size());
+
+      partialSpec = new HashMap<>();
+      assertEquals(5, hm.getPartitions(tbl, partialSpec).size());
+
+      partialSpec = new HashMap<>();
+      partialSpec.put("hr", "14");
+      assertEquals(1, hm.getPartitions(tbl, partialSpec).size());
+
       hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));