You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by pv...@apache.org on 2018/06/21 07:31:01 UTC

hive git commit: HIVE-19782: Flash out TestObjectStore.testDirectSQLDropParitionsCleanup (Peter Vary, reviewed by Vihang Karajgaonkar)

Repository: hive
Updated Branches:
  refs/heads/master e6577a0d9 -> 8f0973b28


HIVE-19782: Flash out TestObjectStore.testDirectSQLDropParitionsCleanup (Peter Vary, reviewed by Vihang Karajgaonkar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8f0973b2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8f0973b2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8f0973b2

Branch: refs/heads/master
Commit: 8f0973b28eabbebf1563e1bfeac3609359d05688
Parents: e6577a0
Author: Peter Vary <pv...@cloudera.com>
Authored: Thu Jun 21 09:29:54 2018 +0200
Committer: Peter Vary <pv...@cloudera.com>
Committed: Thu Jun 21 09:29:54 2018 +0200

----------------------------------------------------------------------
 .../client/builder/HiveObjectRefBuilder.java    |   8 +-
 .../hadoop/hive/metastore/TestObjectStore.java  | 107 +++++++++++++++++--
 2 files changed, 106 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8f0973b2/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveObjectRefBuilder.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveObjectRefBuilder.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveObjectRefBuilder.java
index 62a227a..666b8ab 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveObjectRefBuilder.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveObjectRefBuilder.java
@@ -57,7 +57,13 @@ public class HiveObjectRefBuilder {
   }
 
   public HiveObjectRef buildColumnReference(Table table, String columnName) {
-    return new HiveObjectRef(HiveObjectType.TABLE, table.getDbName(), table.getTableName(),
+    return new HiveObjectRef(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
         Collections.emptyList(), columnName);
   }
+
+  public HiveObjectRef buildPartitionColumnReference(Table table, String columnName,
+                                                     List<String> partValues) {
+    return new HiveObjectRef(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
+        partValues, columnName);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/8f0973b2/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java
index 7984af6..5d5bc76 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestObjectStore.java
@@ -23,11 +23,18 @@ import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.hive.metastore.ObjectStore.RetryingExecutor;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -37,6 +44,8 @@ import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
 import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
@@ -46,6 +55,9 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder;
 import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
 import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.HiveObjectPrivilegeBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.HiveObjectRefBuilder;
+import org.apache.hadoop.hive.metastore.client.builder.PrivilegeGrantInfoBuilder;
 import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.messaging.EventMessage;
@@ -69,6 +81,7 @@ import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -356,8 +369,8 @@ public class TestObjectStore {
    */
   @Test
   public void testDirectSQLDropPartitionsCacheInSession()
-      throws MetaException, InvalidObjectException, NoSuchObjectException {
-    createPartitionedTable();
+      throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
+    createPartitionedTable(false, false);
     // query the partitions with JDO
     Deadline.startTimer("getPartition");
     List<Partition> partitions = objectStore.getPartitionsInternal(DEFAULT_CATALOG_NAME, DB1, TABLE1,
@@ -383,11 +396,11 @@ public class TestObjectStore {
    */
   @Test
   public void testDirectSQLDropPartitionsCacheCrossSession()
-      throws MetaException, InvalidObjectException, NoSuchObjectException {
+      throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
     ObjectStore objectStore2 = new ObjectStore();
     objectStore2.setConf(conf);
 
-    createPartitionedTable();
+    createPartitionedTable(false, false);
     // query the partitions with JDO in the 1st session
     Deadline.startTimer("getPartition");
     List<Partition> partitions = objectStore.getPartitionsInternal(DEFAULT_CATALOG_NAME, DB1, TABLE1,
@@ -419,9 +432,24 @@ public class TestObjectStore {
    */
   @Test
   public void testDirectSQLDropParitionsCleanup() throws MetaException, InvalidObjectException,
-      NoSuchObjectException, SQLException {
-
-    createPartitionedTable();
+      NoSuchObjectException, SQLException, InvalidInputException {
+
+    createPartitionedTable(true, true);
+
+    // Check that every table is in the expected state before the drop
+    checkBackendTableSize("PARTITIONS", 3);
+    checkBackendTableSize("PART_PRIVS", 3);
+    checkBackendTableSize("PART_COL_PRIVS", 3);
+    checkBackendTableSize("PART_COL_STATS", 3);
+    checkBackendTableSize("PARTITION_PARAMS", 3);
+    checkBackendTableSize("PARTITION_KEY_VALS", 3);
+    checkBackendTableSize("SD_PARAMS", 3);
+    checkBackendTableSize("BUCKETING_COLS", 3);
+    checkBackendTableSize("SKEWED_COL_NAMES", 3);
+    checkBackendTableSize("SDS", 4); // Table has an SDS
+    checkBackendTableSize("SORT_COLS", 3);
+    checkBackendTableSize("SERDE_PARAMS", 3);
+    checkBackendTableSize("SERDES", 4); // Table has a serde
 
     // drop the partitions
     Deadline.startTimer("dropPartitions");
@@ -446,10 +474,13 @@ public class TestObjectStore {
 
   /**
    * Creates DB1 database, TABLE1 table with 3 partitions.
+   * @param withPrivileges Should we create privileges as well
+   * @param withStatistics Should we create statistics as well
    * @throws MetaException
    * @throws InvalidObjectException
    */
-  private void createPartitionedTable() throws MetaException, InvalidObjectException {
+  private void createPartitionedTable(boolean withPrivileges, boolean withStatistics)
+      throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
     Database db1 = new DatabaseBuilder()
                        .setName(DB1)
                        .setDescription("description")
@@ -463,16 +494,76 @@ public class TestObjectStore {
             .addCol("test_col1", "int")
             .addCol("test_col2", "int")
             .addPartCol("test_part_col", "int")
+            .addCol("test_bucket_col", "int", "test bucket col comment")
+            .addCol("test_skewed_col", "int", "test skewed col comment")
+            .addCol("test_sort_col", "int", "test sort col comment")
             .build(conf);
     objectStore.createTable(tbl1);
 
+    PrivilegeBag privilegeBag = new PrivilegeBag();
     // Create partitions for the partitioned table
     for(int i=0; i < 3; i++) {
       Partition part = new PartitionBuilder()
                            .inTable(tbl1)
                            .addValue("a" + i)
+                           .addSerdeParam("serdeParam", "serdeParamValue")
+                           .addStorageDescriptorParam("sdParam", "sdParamValue")
+                           .addBucketCol("test_bucket_col")
+                           .addSkewedColName("test_skewed_col")
+                           .addSortCol("test_sort_col", 1)
                            .build(conf);
       objectStore.addPartition(part);
+
+      if (withPrivileges) {
+        HiveObjectRef partitionReference = new HiveObjectRefBuilder().buildPartitionReference(part);
+        HiveObjectRef partitionColumnReference = new HiveObjectRefBuilder()
+            .buildPartitionColumnReference(tbl1, "test_part_col", part.getValues());
+        PrivilegeGrantInfo privilegeGrantInfo = new PrivilegeGrantInfoBuilder()
+            .setPrivilege("a")
+            .build();
+        HiveObjectPrivilege partitionPriv = new HiveObjectPrivilegeBuilder()
+                                                .setHiveObjectRef(partitionReference)
+                                                .setPrincipleName("a")
+                                                .setPrincipalType(PrincipalType.USER)
+                                                .setGrantInfo(privilegeGrantInfo)
+                                                .build();
+        privilegeBag.addToPrivileges(partitionPriv);
+        HiveObjectPrivilege partitionColPriv = new HiveObjectPrivilegeBuilder()
+                                                   .setHiveObjectRef(partitionColumnReference)
+                                                   .setPrincipleName("a")
+                                                   .setPrincipalType(PrincipalType.USER)
+                                                   .setGrantInfo(privilegeGrantInfo)
+                                                   .build();
+        privilegeBag.addToPrivileges(partitionColPriv);
+      }
+
+      if (withStatistics) {
+        ColumnStatistics stats = new ColumnStatistics();
+        ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
+        desc.setCatName(tbl1.getCatName());
+        desc.setDbName(tbl1.getDbName());
+        desc.setTableName(tbl1.getTableName());
+        desc.setPartName("test_part_col=a" + i);
+        stats.setStatsDesc(desc);
+
+        List<ColumnStatisticsObj> statsObjList = new ArrayList<>(1);
+        stats.setStatsObj(statsObjList);
+
+        ColumnStatisticsData data = new ColumnStatisticsData();
+        BooleanColumnStatsData boolStats = new BooleanColumnStatsData();
+        boolStats.setNumTrues(0);
+        boolStats.setNumFalses(0);
+        boolStats.setNumNulls(0);
+        data.setBooleanStats(boolStats);
+
+        ColumnStatisticsObj partStats = new ColumnStatisticsObj("test_part_col", "int", data);
+        statsObjList.add(partStats);
+
+        objectStore.updatePartitionColumnStatistics(stats, part.getValues());
+      }
+    }
+    if (withPrivileges) {
+      objectStore.grantPrivileges(privilegeBag);
     }
   }