Posted to commits@hive.apache.org by ha...@apache.org on 2013/08/30 14:28:23 UTC

svn commit: r1518953 - in /hive/trunk: metastore/src/java/org/apache/hadoop/hive/metastore/ metastore/src/test/org/apache/hadoop/hive/metastore/ ql/src/test/org/apache/hadoop/hive/ql/

Author: hashutosh
Date: Fri Aug 30 12:28:22 2013
New Revision: 1518953

URL: http://svn.apache.org/r1518953
Log:
HIVE-5029 : direct SQL perf optimization cannot be tested well (Sergey Shelukhin via Ashutosh Chauhan)

Added:
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
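
In outline: each public partition-fetch method on ObjectStore now delegates to a
protected *Internal variant taking allowSql/allowJdo flags, so a test subclass can
force either code path and compare the results. A minimal sketch of the resulting
call pattern (PathForcingStore is a hypothetical subclass; only subclasses can
reach the protected methods):

    import java.util.List;

    import org.apache.hadoop.hive.metastore.ObjectStore;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
    import org.apache.hadoop.hive.metastore.api.Partition;

    // Hypothetical subclass forcing each path; VerifyingObjectStore below
    // does the same and additionally compares the two result lists.
    class PathForcingStore extends ObjectStore {
      List<Partition> viaSqlOnly(String db, String tbl, List<String> names)
          throws MetaException, NoSuchObjectException {
        return getPartitionsByNamesInternal(db, tbl, names, true, false);
      }
      List<Partition> viaJdoOnly(String db, String tbl, List<String> names)
          throws MetaException, NoSuchObjectException {
        return getPartitionsByNamesInternal(db, tbl, names, false, true);
      }
    }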

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1518953&r1=1518952&r2=1518953&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Fri Aug 30 12:28:22 2013
@@ -1659,9 +1659,17 @@ public class ObjectStore implements RawS
   @Override
   public List<Partition> getPartitionsByNames(String dbName, String tblName,
       List<String> partNames) throws MetaException, NoSuchObjectException {
+    return getPartitionsByNamesInternal(dbName, tblName, partNames, true, true);
+  }
+
+  protected List<Partition> getPartitionsByNamesInternal(String dbName, String tblName,
+      List<String> partNames, boolean allowSql, boolean allowJdo)
+          throws MetaException, NoSuchObjectException {
+    assert allowSql || allowJdo;
     boolean doTrace = LOG.isDebugEnabled();
     List<Partition> results = null;
-    boolean doUseDirectSql = HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL);
+    boolean doUseDirectSql = allowSql
+        && HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL);
 
     boolean success = false;
     try {
@@ -1671,7 +1679,13 @@ public class ObjectStore implements RawS
         try {
           results = directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames);
         } catch (Exception ex) {
-          LOG.error("Direct SQL failed, falling back to ORM", ex);
+          LOG.error("Direct SQL failed" + (allowJdo ? ", falling back to ORM" : ""), ex);
+          if (!allowJdo) {
+            if (ex instanceof MetaException) {
+              throw (MetaException)ex;
+            }
+            throw new MetaException(ex.getMessage());
+          }
           doUseDirectSql = false;
           rollbackTransaction();
           start = doTrace ? System.nanoTime() : 0;
@@ -1734,9 +1748,16 @@ public class ObjectStore implements RawS
   @Override
   public List<Partition> getPartitionsByFilter(String dbName, String tblName,
       String filter, short maxParts) throws MetaException, NoSuchObjectException {
+    return getPartitionsByFilterInternal(dbName, tblName, filter, maxParts, true, true);
+  }
+
+  protected List<Partition> getPartitionsByFilterInternal(String dbName, String tblName,
+      String filter, short maxParts, boolean allowSql, boolean allowJdo)
+      throws MetaException, NoSuchObjectException {
+    assert allowSql || allowJdo;
     boolean doTrace = LOG.isDebugEnabled();
     // There's no portable SQL limit. It doesn't make a lot of sense w/o offset anyway.
-    boolean doUseDirectSql = (maxParts < 0)
+    boolean doUseDirectSql = allowSql && (maxParts < 0)
         && HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL);
     dbName = dbName.toLowerCase();
     tblName = tblName.toLowerCase();
@@ -1757,7 +1778,13 @@ public class ObjectStore implements RawS
           Table table = convertToTable(mtable);
           results = directSql.getPartitionsViaSqlFilter(table, parser);
         } catch (Exception ex) {
-          LOG.error("Direct SQL failed, falling back to ORM", ex);
+          LOG.error("Direct SQL failed" + (allowJdo ? ", falling back to ORM" : ""), ex);
+          if (!allowJdo) {
+            if (ex instanceof MetaException) {
+              throw (MetaException)ex;
+            }
+            throw new MetaException(ex.getMessage());
+          }
           doUseDirectSql = false;
           rollbackTransaction();
           start = doTrace ? System.nanoTime() : 0;
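
Condensed, the contract both hunks add: when direct SQL fails and JDO is allowed,
log and fall back to ORM as before; when JDO is disallowed (the test-only case),
the SQL failure must surface as a MetaException rather than be masked by a
fallback. A standalone sketch of that shape, with hypothetical
fetchViaSql/fetchViaJdo stand-ins for the two query paths:

    import org.apache.hadoop.hive.metastore.api.MetaException;

    abstract class FallbackSketch<T> {
      abstract T fetchViaSql() throws Exception;     // hypothetical direct-SQL path
      abstract T fetchViaJdo() throws MetaException; // hypothetical JDO/ORM path

      T fetch(boolean allowSql, boolean allowJdo) throws MetaException {
        assert allowSql || allowJdo;
        if (allowSql) {
          try {
            return fetchViaSql();
          } catch (Exception ex) {
            if (!allowJdo) {
              // Test-only case: no fallback, so rethrow as MetaException.
              if (ex instanceof MetaException) throw (MetaException) ex;
              throw new MetaException(ex.getMessage());
            }
            // Otherwise fall through to the ORM path, as before this change.
          }
        }
        return fetchViaJdo();
      }
    }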

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java?rev=1518953&r1=1518952&r2=1518953&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingRawStore.java Fri Aug 30 12:28:22 2013
@@ -23,7 +23,9 @@ import java.lang.reflect.InvocationTarge
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.lang.reflect.UndeclaredThrowableException;
+import java.util.List;
 
+import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -70,8 +72,19 @@ public class RetryingRawStore implements
 
     RetryingRawStore handler = new RetryingRawStore(hiveConf, conf, baseClass, id);
 
-    return (RawStore) Proxy.newProxyInstance(RetryingRawStore.class.getClassLoader()
-        , baseClass.getInterfaces(), handler);
+    // Look for interfaces on both the class and all base classes.
+    return (RawStore) Proxy.newProxyInstance(RetryingRawStore.class.getClassLoader(),
+        getAllInterfaces(baseClass), handler);
+  }
+
+  private static Class<?>[] getAllInterfaces(Class<?> baseClass) {
+    List interfaces = ClassUtils.getAllInterfaces(baseClass);
+    Class<?>[] result = new Class<?>[interfaces.size()];
+    int i = 0;
+    for (Object o : interfaces) {
+      result[i++] = (Class<?>)o;
+    }
+    return result;
   }
 
   private void init() throws MetaException {
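
The proxy change matters because the configured RawStore may now be a subclass:
Class.getInterfaces() returns only the interfaces declared directly on a class,
so for VerifyingObjectStore (which declares none itself) the proxy would not
implement RawStore, which ObjectStore declares, and the cast would fail.
ClassUtils.getAllInterfaces also walks the superclass chain. A toy illustration
with hypothetical stand-in types:

    import java.util.List;

    import org.apache.commons.lang.ClassUtils;

    public class InterfaceDemo {
      interface RawStoreLike {}                    // stands in for RawStore
      static class Base implements RawStoreLike {} // stands in for ObjectStore
      static class Sub extends Base {}             // stands in for VerifyingObjectStore

      public static void main(String[] args) {
        // Prints 0: Sub declares no interfaces of its own.
        System.out.println(Sub.class.getInterfaces().length);
        // Includes RawStoreLike, collected from the superclass.
        List<?> all = ClassUtils.getAllInterfaces(Sub.class);
        System.out.println(all);
      }
    }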

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java?rev=1518953&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java Fri Aug 30 12:28:22 2013
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.apache.commons.lang.StringUtils.repeat;
+
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.ClassUtils;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+
+class VerifyingObjectStore extends ObjectStore {
+  private static final Log LOG = LogFactory.getLog(VerifyingObjectStore.class);
+
+  public VerifyingObjectStore() {
+    super();
+    LOG.warn(getClass().getSimpleName() + " is being used - test run");
+  }
+
+  @Override
+  public List<Partition> getPartitionsByFilter(String dbName, String tblName, String filter,
+      short maxParts) throws MetaException, NoSuchObjectException {
+    List<Partition> sqlResults = getPartitionsByFilterInternal(
+        dbName, tblName, filter, maxParts, true, false);
+    List<Partition> ormResults = getPartitionsByFilterInternal(
+        dbName, tblName, filter, maxParts, false, true);
+    compareParts(sqlResults, ormResults);
+    return sqlResults;
+  }
+
+  @Override
+  public List<Partition> getPartitionsByNames(String dbName, String tblName,
+      List<String> partNames) throws MetaException, NoSuchObjectException {
+    List<Partition> sqlResults = getPartitionsByNamesInternal(
+        dbName, tblName, partNames, true, false);
+    List<Partition> ormResults = getPartitionsByNamesInternal(
+        dbName, tblName, partNames, false, true);
+    compareParts(sqlResults, ormResults);
+    return sqlResults;
+  }
+
+  private void compareParts(List<Partition> sqlResults, List<Partition> ormResults)
+      throws MetaException {
+    if (sqlResults.size() != ormResults.size()) {
+      String msg = "Lists are not the same size: SQL " + sqlResults.size()
+          + ", ORM " + ormResults.size();
+      LOG.error(msg);
+      throw new MetaException(msg);
+    }
+
+    StringBuilder errorStr = new StringBuilder();
+    for (int partIx = 0; partIx < sqlResults.size(); ++partIx) {
+      Partition p1 = sqlResults.get(partIx), p2 = ormResults.get(partIx);
+      if (EqualsBuilder.reflectionEquals(p1, p2)) continue;
+      errorStr.append("Results are different at list index " + partIx + ": \n");
+      try {
+        dumpObject(errorStr, "SQL", p1, Partition.class, 0);
+        errorStr.append("\n");
+        dumpObject(errorStr, "ORM", p2, Partition.class, 0);
+        errorStr.append("\n\n");
+      } catch (Throwable t) {
+        String msg = "Error getting the diff at list index " + partIx;
+        errorStr.append("\n\n" + msg);
+        LOG.error(msg, t);
+        break;
+      }
+    }
+    if (errorStr.length() > 0) {
+      LOG.error("Different results: \n" + errorStr.toString());
+      throw new MetaException("Different results from SQL and ORM, see log for details");
+    }
+  }
+
+  private void dumpObject(StringBuilder errorStr, String name, Object p, Class<?> c, int level)
+      throws IllegalAccessException {
+    String offsetStr = repeat("  ", level);
+    if (p == null || c == String.class || c.isPrimitive()
+        || ClassUtils.wrapperToPrimitive(c) != null) {
+      errorStr.append(offsetStr).append(name + ": [" + p + "]\n");
+    } else if (ClassUtils.isAssignable(c, Iterable.class)) {
+      errorStr.append(offsetStr).append(name + " is an iterable\n");
+      Iterator<?> i1 = ((Iterable<?>)p).iterator();
+      int i = 0;
+      while (i1.hasNext()) {
+        Object o1 = i1.next();
+        Class<?> t = o1 == null ? Object.class : o1.getClass(); // ...
+        dumpObject(errorStr, name + "[" + (i++) + "]", o1, t, level + 1);
+      }
+    } else if (c.isArray()) {
+      int len = Array.getLength(p);
+      Class<?> t = c.getComponentType();
+      errorStr.append(offsetStr).append(name + " is an array\n");
+      for (int i = 0; i < len; ++i) {
+        dumpObject(errorStr, name + "[" + i + "]", Array.get(p, i), t, level + 1);
+      }
+    } else if (ClassUtils.isAssignable(c, Map.class)) {
+      Map<?,?> c1 = (Map<?,?>)p;
+      errorStr.append(offsetStr).append(name + " is a map\n");
+      dumpObject(errorStr, name + ".keys", c1.keySet(), Set.class, level + 1);
+      dumpObject(errorStr, name + ".vals", c1.values(), Collection.class, level + 1);
+    } else {
+      errorStr.append(offsetStr).append(name + " is of type " + c.getCanonicalName() + "\n");
+      // TODO: this doesn't include superclass.
+      Field[] fields = c.getDeclaredFields();
+      AccessibleObject.setAccessible(fields, true);
+      for (int i = 0; i < fields.length; i++) {
+        Field f = fields[i];
+        if (f.getName().indexOf('$') != -1 || Modifier.isStatic(f.getModifiers())) continue;
+        dumpObject(errorStr, name + "." + f.getName(), f.get(p), f.getType(), level + 1);
+      }
+    }
+  }
+}
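
compareParts leans on commons-lang's EqualsBuilder.reflectionEquals, which
compares instance fields reflectively instead of calling equals(), so it works
even for types without an equals() override. A toy illustration with a
hypothetical Pair class:

    import org.apache.commons.lang.builder.EqualsBuilder;

    public class ReflectionEqualsDemo {
      static class Pair {
        final String key;
        final int value;
        Pair(String key, int value) { this.key = key; this.value = value; }
        // No equals() override, so equals() falls back to identity.
      }

      public static void main(String[] args) {
        Pair a = new Pair("k", 1), b = new Pair("k", 1);
        System.out.println(a.equals(b));                          // false
        System.out.println(EqualsBuilder.reflectionEquals(a, b)); // true
      }
    }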

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1518953&r1=1518952&r2=1518953&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Aug 30 12:28:22 2013
@@ -217,6 +217,10 @@ public class QTestUtil {
       convertPathsFromWindowsToHdfs();
     }
 
+    // Plug verifying metastore in for testing.
+    conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+        "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
+
     if (miniMr) {
       assert dfs != null;
       assert mr != null;
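
A minimal sketch of the same plumbing for any other HiveConf-driven harness
(the wrapper class is hypothetical; note VerifyingObjectStore lives under the
metastore test sources, so the class only resolves on a test classpath):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class VerifyingStoreSetup {
      public static HiveConf withVerifyingStore() {
        HiveConf conf = new HiveConf();
        // Same setting QTestUtil applies above.
        conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
            "org.apache.hadoop.hive.metastore.VerifyingObjectStore");
        return conf;
      }
    }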