Posted to commits@hive.apache.org by ga...@apache.org on 2017/12/07 17:53:48 UTC

[06/50] [abbrv] hive git commit: HIVE-17980 Move HiveMetaStoreClient plus a few remaining classes. This closes #272 (Alan Gates, reviewed by Daniel Dai)
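
With HiveMetaStoreClient now living in standalone-metastore, a caller needs
only that module on the classpath to talk to a running metastore. A minimal
sketch, assuming the post-move constructor accepts a plain Hadoop
Configuration (the thrift URI below is a placeholder):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

    Configuration conf = MetastoreConf.newMetastoreConf();
    // Point the client at an already-running metastore (placeholder URI).
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS,
        "thrift://localhost:9083");
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      for (String db : client.getAllDatabases()) {
        System.out.println(db);
      }
    } finally {
      client.close();
    }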

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
index 7c54354..19279a5 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreConnectionUrlHook.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.metastore;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.junit.Test;
 
 /**
@@ -37,7 +36,7 @@ public class TestMetaStoreConnectionUrlHook {
     MetastoreConf.setVar(conf, ConfVars.CONNECTURLHOOK, DummyJdoConnectionUrlHook.class.getName());
     MetastoreConf.setVar(conf, ConfVars.CONNECTURLKEY, DummyJdoConnectionUrlHook.initialUrl);
     MetastoreConf.setVar(conf, ConfVars.RAW_STORE_IMPL, DummyRawStoreForJdoConnection.class.getName());
-    MetaStoreUtils.setConfForStandloneMode(conf);
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
 
     // Instantiating the HMSHandler with hive.metastore.checkForDefaultDb will cause it to
     // initialize an instance of the DummyRawStoreForJdoConnection

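The hunk above shows only context around the change; the test body itself (not
shown here) then constructs an HMSHandler so that RawStore initialization runs
the connection URL hook. In outline only, since the handler constructor
signature (String, Configuration, boolean) is an assumption:

    // Constructing the handler initializes the configured RawStore
    // (DummyRawStoreForJdoConnection), which runs DummyJdoConnectionUrlHook
    // and lets the test observe the rewritten connection URL. The handler
    // name string is illustrative.
    HiveMetaStore.HMSHandler hms =
        new HiveMetaStore.HMSHandler("testUrlHook", conf, true);
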
http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
new file mode 100644
index 0000000..bf8556d
--- /dev/null
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestOldSchema.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.ndv.hll.HyperLogLog;
+import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
+import org.apache.hadoop.hive.metastore.api.InvalidInputException;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestOldSchema {
+  private ObjectStore store = null;
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestOldSchema.class.getName());
+
+  public static class MockPartitionExpressionProxy implements PartitionExpressionProxy {
+    @Override
+    public String convertExprToFilter(byte[] expr) throws MetaException {
+      return null;
+    }
+
+    @Override
+    public boolean filterPartitionsByExpr(List<FieldSchema> partColumns, byte[] expr,
+                                          String defaultPartitionName,
+                                          List<String> partitionNames) throws MetaException {
+      return false;
+    }
+
+    @Override
+    public FileMetadataExprType getMetadataType(String inputFormat) {
+      return null;
+    }
+
+    @Override
+    public SearchArgument createSarg(byte[] expr) {
+      return null;
+    }
+
+    @Override
+    public FileFormatProxy getFileFormatProxy(FileMetadataExprType type) {
+      return null;
+    }
+  }
+
+  private byte[][] bitVectors = new byte[2][];
+
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = MetastoreConf.newMetastoreConf();
+    MetastoreConf.setClass(conf, MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS,
+        MockPartitionExpressionProxy.class, PartitionExpressionProxy.class);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.STATS_FETCH_BITVECTOR, false);
+
+    store = new ObjectStore();
+    store.setConf(conf);
+    dropAllStoreObjects(store);
+
+    HyperLogLog hll = HyperLogLog.builder().build();
+    hll.addLong(1);
+    bitVectors[1] = hll.serialize();
+    hll = HyperLogLog.builder().build();
+    hll.addLong(2);
+    hll.addLong(3);
+    hll.addLong(3);
+    hll.addLong(4);
+    bitVectors[0] = hll.serialize();
+  }
+
+  @After
+  public void tearDown() {
+  }
+
+  /**
+   * Tests partition and partition column statistics operations.
+   */
+  @Test
+  public void testPartitionOps() throws Exception {
+    String dbName = "default";
+    String tableName = "snp";
+    Database db1 = new Database(dbName, "description", "locationurl", null);
+    store.createDatabase(db1);
+    long now = System.currentTimeMillis();
+    List<FieldSchema> cols = new ArrayList<>();
+    cols.add(new FieldSchema("col1", "long", "nocomment"));
+    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
+    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
+        serde, null, null, Collections.emptyMap());
+    List<FieldSchema> partCols = new ArrayList<>();
+    partCols.add(new FieldSchema("ds", "string", ""));
+    Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols,
+        Collections.emptyMap(), null, null, null);
+    store.createTable(table);
+
+    Deadline.startTimer("getPartition");
+    for (int i = 0; i < 10; i++) {
+      List<String> partVal = new ArrayList<>();
+      partVal.add(String.valueOf(i));
+      StorageDescriptor psd = new StorageDescriptor(sd);
+      psd.setLocation("file:/tmp/default/hit/ds=" + partVal.get(0));
+      Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd,
+          Collections.emptyMap());
+      store.addPartition(part);
+      ColumnStatistics cs = new ColumnStatistics();
+      ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
+      desc.setLastAnalyzed(now);
+      desc.setPartName("ds=" + i);
+      cs.setStatsDesc(desc);
+      ColumnStatisticsObj obj = new ColumnStatisticsObj();
+      obj.setColName("col1");
+      obj.setColType("bigint");
+      ColumnStatisticsData data = new ColumnStatisticsData();
+      LongColumnStatsData dcsd = new LongColumnStatsData();
+      dcsd.setHighValue(1000 + i);
+      dcsd.setLowValue(-1000 - i);
+      dcsd.setNumNulls(i);
+      dcsd.setNumDVs(10 * i + 1);
+      dcsd.setBitVectors(bitVectors[0]);
+      data.setLongStats(dcsd);
+      obj.setStatsData(data);
+      cs.addToStatsObj(obj);
+      store.updatePartitionColumnStatistics(cs, partVal);
+
+    }
+
+    Checker statChecker = new Checker() {
+      @Override
+      public void checkStats(AggrStats aggrStats) throws Exception {
+        Assert.assertEquals(10, aggrStats.getPartsFound());
+        Assert.assertEquals(1, aggrStats.getColStatsSize());
+        ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
+        Assert.assertEquals("col1", cso.getColName());
+        Assert.assertEquals("bigint", cso.getColType());
+        LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
+        Assert.assertEquals(1009, lcsd.getHighValue(), 0.01);  // max over parts: 1000 + 9
+        Assert.assertEquals(-1009, lcsd.getLowValue(), 0.01);  // min over parts: -1000 - 9
+        Assert.assertEquals(45, lcsd.getNumNulls());           // sum over parts: 0 + 1 + ... + 9
+        Assert.assertEquals(91, lcsd.getNumDVs());             // max over parts: 10 * 9 + 1
+      }
+    };
+    List<String> partNames = new ArrayList<>();
+    for (int i = 0; i < 10; i++) {
+      partNames.add("ds=" + i);
+    }
+    AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames,
+        Arrays.asList("col1"));
+    statChecker.checkStats(aggrStats);
+
+  }
+
+  private interface Checker {
+    void checkStats(AggrStats aggrStats) throws Exception;
+  }
+
+  private static void dropAllStoreObjects(RawStore store) throws MetaException,
+      InvalidObjectException, InvalidInputException {
+    try {
+      Deadline.registerIfNot(100000);
+      Deadline.startTimer("getPartition");
+      List<String> dbs = store.getAllDatabases();
+      for (int i = 0; i < dbs.size(); i++) {
+        String db = dbs.get(i);
+        List<String> tbls = store.getAllTables(db);
+        for (String tbl : tbls) {
+          List<Partition> parts = store.getPartitions(db, tbl, 100);
+          for (Partition part : parts) {
+            store.dropPartition(db, tbl, part.getValues());
+          }
+          store.dropTable(db, tbl);
+        }
+        store.dropDatabase(db);
+      }
+    } catch (NoSuchObjectException e) { // expected when the store is already empty
+    }
+  }
+
+}
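
The values the Checker asserts follow from the loop above. The aggregation
semantics (max of highs, min of lows, sum of null counts, and max of NDVs,
since bit vectors are disabled via STATS_FETCH_BITVECTOR) are inferred from
the asserted numbers rather than stated in the test; recomputed in isolation:

    // Per-partition stats for i = 0..9: high = 1000 + i, low = -1000 - i,
    // numNulls = i, numDVs = 10 * i + 1.
    long high = Long.MIN_VALUE, low = Long.MAX_VALUE, nulls = 0, dvs = 0;
    for (int i = 0; i < 10; i++) {
      high = Math.max(high, 1000 + i);
      low = Math.min(low, -1000 - i);
      nulls += i;
      dvs = Math.max(dvs, 10 * i + 1);
    }
    // high == 1009, low == -1009, nulls == 45, dvs == 91, matching the asserts.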

http://git-wip-us.apache.org/repos/asf/hive/blob/d79c4595/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
new file mode 100644
index 0000000..150b6ca
--- /dev/null
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.apache.commons.lang.StringUtils.repeat;
+
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.ClassUtils;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.thrift.TException;
+
+public class VerifyingObjectStore extends ObjectStore {
+  private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class);
+
+  public VerifyingObjectStore() {
+    super();
+    LOG.warn(getClass().getSimpleName() + " is being used - test run");
+  }
+
+  @Override
+  public List<Partition> getPartitionsByFilter(String dbName, String tblName, String filter,
+      short maxParts) throws MetaException, NoSuchObjectException {
+    List<Partition> sqlResults = getPartitionsByFilterInternal(
+        dbName, tblName, filter, maxParts, true, false);
+    List<Partition> ormResults = getPartitionsByFilterInternal(
+        dbName, tblName, filter, maxParts, false, true);
+    verifyLists(sqlResults, ormResults, Partition.class);
+    return sqlResults;
+  }
+
+  @Override
+  public List<Partition> getPartitionsByNames(String dbName, String tblName,
+      List<String> partNames) throws MetaException, NoSuchObjectException {
+    List<Partition> sqlResults = getPartitionsByNamesInternal(
+        dbName, tblName, partNames, true, false);
+    List<Partition> ormResults = getPartitionsByNamesInternal(
+        dbName, tblName, partNames, false, true);
+    verifyLists(sqlResults, ormResults, Partition.class);
+    return sqlResults;
+  }
+
+  @Override
+  public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr,
+      String defaultPartitionName, short maxParts, List<Partition> result) throws TException {
+    List<Partition> ormParts = new LinkedList<>();
+    boolean sqlResult = getPartitionsByExprInternal(
+        dbName, tblName, expr, defaultPartitionName, maxParts, result, true, false);
+    boolean ormResult = getPartitionsByExprInternal(
+        dbName, tblName, expr, defaultPartitionName, maxParts, ormParts, false, true);
+    if (sqlResult != ormResult) {
+      String msg = "The unknown flag is different - SQL " + sqlResult + ", ORM " + ormResult;
+      LOG.error(msg);
+      throw new MetaException(msg);
+    }
+    verifyLists(result, ormParts, Partition.class);
+    return sqlResult;
+  }
+
+  @Override
+  public List<Partition> getPartitions(
+      String dbName, String tableName, int maxParts) throws MetaException, NoSuchObjectException {
+    List<Partition> sqlResults = getPartitionsInternal(dbName, tableName, maxParts, true, false);
+    List<Partition> ormResults = getPartitionsInternal(dbName, tableName, maxParts, false, true);
+    verifyLists(sqlResults, ormResults, Partition.class);
+    return sqlResults;
+  }
+
+  @Override
+  public ColumnStatistics getTableColumnStatistics(String dbName,
+      String tableName, List<String> colNames) throws MetaException, NoSuchObjectException {
+    ColumnStatistics sqlResult = getTableColumnStatisticsInternal(
+        dbName, tableName, colNames, true, false);
+    ColumnStatistics jdoResult = getTableColumnStatisticsInternal(
+        dbName, tableName, colNames, false, true);
+    verifyObjects(sqlResult, jdoResult, ColumnStatistics.class);
+    return sqlResult;
+  }
+
+  @Override
+  public List<ColumnStatistics> getPartitionColumnStatistics(String dbName,
+      String tableName, List<String> partNames, List<String> colNames)
+      throws MetaException, NoSuchObjectException {
+    List<ColumnStatistics> sqlResult = getPartitionColumnStatisticsInternal(
+        dbName, tableName, partNames, colNames, true, false);
+    List<ColumnStatistics> jdoResult = getPartitionColumnStatisticsInternal(
+        dbName, tableName, partNames, colNames, false, true);
+    verifyLists(sqlResult, jdoResult, ColumnStatistics.class);
+    return sqlResult;
+  }
+
+  private void verifyObjects(
+      Object sqlResult, Object jdoResult, Class<?> clazz) throws MetaException {
+    if (EqualsBuilder.reflectionEquals(sqlResult, jdoResult)) return;
+    StringBuilder errorStr = new StringBuilder("Objects are different: \n");
+    try {
+      dumpObject(errorStr, "SQL", sqlResult, clazz, 0);
+      errorStr.append("\n");
+      dumpObject(errorStr, "ORM", jdoResult, clazz, 0);
+    } catch (Throwable t) {
+      errorStr.append("Error getting the diff: " + t);
+    }
+    LOG.error("Different results: \n" + errorStr.toString());
+    throw new MetaException("Different results from SQL and ORM, see log for details");
+  }
+
+  private <T> void verifyLists(Collection<T> sqlResults, Collection<T> ormResults,
+      Class<?> clazz) throws MetaException {
+    final int MAX_DIFFS = 5;
+    if (sqlResults.size() != ormResults.size()) {
+      String msg = "Lists are not the same size: SQL " + sqlResults.size()
+          + ", ORM " + ormResults.size();
+      LOG.error(msg);
+      throw new MetaException(msg);
+    }
+
+    Iterator<T> sqlIter = sqlResults.iterator(), ormIter = ormResults.iterator();
+    StringBuilder errorStr = new StringBuilder();
+    int errors = 0;
+    for (int partIx = 0; partIx < sqlResults.size(); ++partIx) {
+      assert sqlIter.hasNext() && ormIter.hasNext();
+      T p1 = sqlIter.next(), p2 = ormIter.next();
+      if (EqualsBuilder.reflectionEquals(p1, p2)) continue;
+      errorStr.append("Results are different at list index " + partIx + ": \n");
+      try {
+        dumpObject(errorStr, "SQL", p1, clazz, 0);
+        errorStr.append("\n");
+        dumpObject(errorStr, "ORM", p2, clazz, 0);
+        errorStr.append("\n\n");
+      } catch (Throwable t) {
+        String msg = "Error getting the diff at list index " + partIx;
+        errorStr.append("\n\n" + msg);
+        LOG.error(msg, t);
+        break;
+      }
+      if (++errors == MAX_DIFFS) {
+        errorStr.append("\n\nToo many diffs, giving up (lists might be sorted differently)");
+        break;
+      }
+    }
+    if (errorStr.length() > 0) {
+      LOG.error("Different results: \n" + errorStr.toString());
+      throw new MetaException("Different results from SQL and ORM, see log for details");
+    }
+  }
+
+  private static void dumpObject(StringBuilder errorStr, String name, Object p,
+      Class<?> c, int level) throws IllegalAccessException {
+    String offsetStr = repeat("  ", level);
+    if (p == null || c == String.class || c.isPrimitive()
+        || ClassUtils.wrapperToPrimitive(c) != null) {
+      errorStr.append(offsetStr).append(name + ": [" + p + "]\n");
+    } else if (ClassUtils.isAssignable(c, Iterable.class)) {
+      errorStr.append(offsetStr).append(name + " is an iterable\n");
+      Iterator<?> i1 = ((Iterable<?>)p).iterator();
+      int i = 0;
+      while (i1.hasNext()) {
+        Object o1 = i1.next();
+        Class<?> t = o1 == null ? Object.class : o1.getClass(); // ...
+        dumpObject(errorStr, name + "[" + (i++) + "]", o1, t, level + 1);
+      }
+    } else if (c.isArray()) {
+      int len = Array.getLength(p);
+      Class<?> t = c.getComponentType();
+      errorStr.append(offsetStr).append(name + " is an array\n");
+      for (int i = 0; i < len; ++i) {
+        dumpObject(errorStr, name + "[" + i + "]", Array.get(p, i), t, level + 1);
+      }
+    } else if (ClassUtils.isAssignable(c, Map.class)) {
+      Map<?,?> c1 = (Map<?,?>)p;
+      errorStr.append(offsetStr).append(name + " is a map\n");
+      dumpObject(errorStr, name + ".keys", c1.keySet(), Set.class, level + 1);
+      dumpObject(errorStr, name + ".vals", c1.values(), Collection.class, level + 1);
+    } else {
+      errorStr.append(offsetStr).append(name + " is of type " + c.getCanonicalName() + "\n");
+      // TODO: this doesn't include superclass.
+      Field[] fields = c.getDeclaredFields();
+      AccessibleObject.setAccessible(fields, true);
+      for (int i = 0; i < fields.length; i++) {
+        Field f = fields[i];
+        if (f.getName().indexOf('$') != -1 || Modifier.isStatic(f.getModifiers())) continue;
+        dumpObject(errorStr, name + "." + f.getName(), f.get(p), f.getType(), level + 1);
+      }
+    }
+  }
+}
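
VerifyingObjectStore generalizes to a simple pattern: run one logical lookup
through two independent code paths and reflect-compare the results before
trusting either. A self-contained sketch of that idea (the Fetcher interface
and class name are illustrative, not Hive API):

    import org.apache.commons.lang.builder.EqualsBuilder;

    public class DualPathCheck {
      interface Fetcher<T> {
        T fetch() throws Exception;
      }

      // Runs both paths, fails loudly on any mismatch, and returns the
      // direct-SQL result on success, as VerifyingObjectStore does.
      static <T> T fetchAndVerify(Fetcher<T> sqlPath, Fetcher<T> ormPath)
          throws Exception {
        T viaSql = sqlPath.fetch();
        T viaOrm = ormPath.fetch();
        if (!EqualsBuilder.reflectionEquals(viaSql, viaOrm)) {
          throw new IllegalStateException("SQL and ORM paths disagree");
        }
        return viaSql;
      }
    }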