You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/08/22 23:37:21 UTC

svn commit: r1619936 [2/49] - in /hive/branches/cbo: ./ accumulo-handler/ ant/src/org/apache/hadoop/hive/ant/ bin/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/co...

Propchange: hive/branches/cbo/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1617651-1619929

Modified: hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java (original)
+++ hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java Fri Aug 22 21:36:47 2014
@@ -46,7 +46,7 @@ import org.apache.velocity.exception.Res
 import org.apache.velocity.runtime.RuntimeConstants;
 
 public class QTestGenTask extends Task {
-   private static final Splitter CSV_SPLITTER = Splitter.on(',')
+   private static final Splitter TEST_SPLITTER = Splitter.onPattern("[, ]")
        .trimResults()
        .omitEmptyStrings();
 
@@ -93,15 +93,15 @@ public class QTestGenTask extends Task {
     }
   }
 
-  public class QFileRegexFilter extends QFileFilter {
+  public class QFileRegexFilter implements FileFilter {
     Pattern filterPattern;
-    public QFileRegexFilter(String filter, Set<String> includeOnly) {
-      super(includeOnly);
+    public QFileRegexFilter(String filter) {
       filterPattern = Pattern.compile(filter);
     }
 
     public boolean accept(File filePath) {
-      if (!super.accept(filePath)) {
+      if (filePath.isDirectory() ||
+          !filePath.getName().endsWith(".q")) {
         return false;
       }
       String testName = StringUtils.chomp(filePath.getName(), ".q");
@@ -233,7 +233,7 @@ public class QTestGenTask extends Task {
 
   public void setTemplatePath(String templatePath) throws Exception {
     templatePaths.clear();
-    for (String relativePath : CSV_SPLITTER.split(templatePath)) {
+    for (String relativePath : TEST_SPLITTER.split(templatePath)) {
       templatePaths.add(project.resolveFile(relativePath).getCanonicalPath());
     }
     System.out.println("Template Path:" + getTemplatePath());
@@ -338,7 +338,7 @@ public class QTestGenTask extends Task {
 
     Set<String> includeOnly = null;
     if (includeQueryFile != null && !includeQueryFile.isEmpty()) {
-      includeOnly = Sets.<String>newHashSet(CSV_SPLITTER.split(includeQueryFile));
+      includeOnly = Sets.<String>newHashSet(TEST_SPLITTER.split(includeQueryFile));
     }
 
     List<File> qFiles;
@@ -350,6 +350,13 @@ public class QTestGenTask extends Task {
     File logDir = null;
 
     try {
+
+      System.out.println("Starting Generation of: " + className);
+      System.out.println("Include Files: " + includeQueryFile);
+      System.out.println("Excluded Files: " + excludeQueryFile);
+      System.out.println("Query Files: " + queryFile);
+      System.out.println("Query Files Regex: " + queryFileRegex);
+
       // queryDirectory should not be null
       queryDir = new File(queryDirectory);
 
@@ -357,10 +364,7 @@ public class QTestGenTask extends Task {
       Set<File> testFiles = new HashSet<File>();
       if (queryFile != null && !queryFile.equals("")) {
         // The user may have passed a list of files - comma separated
-        for (String qFile : CSV_SPLITTER.split(queryFile)) {
-          if (includeOnly != null && !includeOnly.contains(qFile)) {
-            continue;
-          }
+        for (String qFile : TEST_SPLITTER.split(queryFile)) {
           if (null != queryDir) {
             testFiles.add(new File(queryDir, qFile));
           } else {
@@ -368,9 +372,9 @@ public class QTestGenTask extends Task {
           }
         }
       } else if (queryFileRegex != null && !queryFileRegex.equals("")) {
-        for (String regex : CSV_SPLITTER.split(queryFileRegex)) {
+        for (String regex : TEST_SPLITTER.split(queryFileRegex)) {
           testFiles.addAll(Arrays.asList(queryDir.listFiles(
-              new QFileRegexFilter(regex, includeOnly))));
+              new QFileRegexFilter(regex))));
         }
       } else if (runDisabled != null && runDisabled.equals("true")) {
         testFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter(includeOnly))));
@@ -380,7 +384,7 @@ public class QTestGenTask extends Task {
 
       if (excludeQueryFile != null && !excludeQueryFile.equals("")) {
         // Exclude specified query files, comma separated
-        for (String qFile : CSV_SPLITTER.split(excludeQueryFile)) {
+        for (String qFile : TEST_SPLITTER.split(excludeQueryFile)) {
           if (null != queryDir) {
             testFiles.remove(new File(queryDir, qFile));
           } else {

Modified: hive/branches/cbo/bin/hive
URL: http://svn.apache.org/viewvc/hive/branches/cbo/bin/hive?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/bin/hive (original)
+++ hive/branches/cbo/bin/hive Fri Aug 22 21:36:47 2014
@@ -149,7 +149,11 @@ fi
 export HADOOP_HOME_WARN_SUPPRESS=true 
 
 # pass classpath to hadoop
-export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${CLASSPATH}"
+if [ "$HADOOP_CLASSPATH" != "" ]; then
+  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${CLASSPATH}"
+else
+  export HADOOP_CLASSPATH="$CLASSPATH"
+fi
 
 # also pass hive classpath to hadoop
 if [ "$HIVE_CLASSPATH" != "" ]; then

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java Fri Aug 22 21:36:47 2014
@@ -108,7 +108,7 @@ public class GenHiveTemplate extends Tas
         continue;
       }
       Element property = appendElement(root, "property", null);
-      appendElement(property, "key", confVars.varname);
+      appendElement(property, "name", confVars.varname);
       appendElement(property, "value", confVars.getDefaultExpr());
       appendElement(property, "description", normalize(confVars.getDescription()));
       // wish to add new line here.

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java Fri Aug 22 21:36:47 2014
@@ -103,6 +103,13 @@ public final class Decimal128 extends Nu
   private short scale;
 
   /**
+   * This is the actual scale detected from the value passed to this Decimal128.
+   * The value is always equal to or less than #scale. It is used to return the correct
+   * decimal string from {@link #getHiveDecimalString()}.
+   */
+  private short actualScale;
+
+  /**
    * -1 means negative, 0 means zero, 1 means positive.
    *
    * @serial
@@ -127,6 +134,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128();
     this.scale = 0;
     this.signum = 0;
+    this.actualScale = 0;
   }
 
   /**
@@ -139,6 +147,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
+    this.actualScale = o.actualScale;
   }
 
   /**
@@ -178,6 +187,7 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.unscaledValue = new UnsignedInt128(unscaledVal);
     this.scale = scale;
+    this.actualScale = scale;
     if (unscaledValue.isZero()) {
       this.signum = 0;
     } else {
@@ -264,6 +274,7 @@ public final class Decimal128 extends Nu
     this.unscaledValue.update(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
+    this.actualScale = o.actualScale;
     return this;
   }
 
@@ -292,7 +303,7 @@ public final class Decimal128 extends Nu
 
   /**
    * Update the value of this object with the given {@code long} with the given
-   * scal.
+   * scale.
    *
    * @param val
    *          {@code long} value to be set to {@code Decimal128}.
@@ -314,6 +325,8 @@ public final class Decimal128 extends Nu
     if (scale != 0) {
       changeScaleDestructive(scale);
     }
+    // set actualScale to 0 because there are no fractional digits in integer values
+    this.actualScale = 0;
     return this;
   }
 
@@ -341,6 +354,11 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.scale = scale;
 
+    // Obtains the scale of the double value to keep a record of the original
+    // scale. This will be used to print the HiveDecimal string with the
+    // correct value scale.
+    this.actualScale = (short) BigDecimal.valueOf(val).scale();
+
     // Translate the double into sign, exponent and significand, according
     // to the formulae in JLS, Section 20.10.22.
     long valBits = Double.doubleToLongBits(val);
@@ -364,6 +382,10 @@ public final class Decimal128 extends Nu
       exponent++;
     }
 
+    // Calculate the real number of fractional digits from the double value
+    this.actualScale -= (exponent > 0) ? exponent : 0;
+    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
+
     // so far same as java.math.BigDecimal, but the scaling below is
     // specific to ANSI SQL Numeric.
 
@@ -426,6 +448,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update(IntBuffer buf, int precision) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(buf, precision);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -442,6 +465,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -458,6 +482,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -474,6 +499,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -490,6 +516,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -510,6 +537,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update(int[] array, int offset, int precision) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(array, offset + 1, precision);
     return this;
@@ -527,6 +555,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(array, offset + 1);
     return this;
@@ -544,6 +573,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(array, offset + 1);
     return this;
@@ -561,6 +591,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(array, offset + 1);
     return this;
@@ -578,6 +609,7 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
+    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(array, offset + 1);
     return this;
@@ -600,7 +632,6 @@ public final class Decimal128 extends Nu
    * @param scale
    */
   public Decimal128 update(BigInteger bigInt, short scale) {
-    this.scale = scale;
     this.signum = (byte) bigInt.compareTo(BigInteger.ZERO);
     if (signum == 0) {
       update(0);
@@ -609,6 +640,9 @@ public final class Decimal128 extends Nu
     } else {
       unscaledValue.update(bigInt);
     }
+    this.scale = scale;
+    this.actualScale = scale;
+
     return this;
   }
 
@@ -731,6 +765,9 @@ public final class Decimal128 extends Nu
       this.unscaledValue.addDestructive(accumulated);
     }
 
+    this.actualScale = (short) (fractionalDigits - exponent);
+    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
+
     int scaleAdjust = scale - fractionalDigits + exponent;
     if (scaleAdjust > 0) {
       this.unscaledValue.scaleUpTenDestructive((short) scaleAdjust);
@@ -924,6 +961,7 @@ public final class Decimal128 extends Nu
       this.unscaledValue.scaleUpTenDestructive((short) -scaleDown);
     }
     this.scale = scale;
+    this.actualScale = scale;
 
     this.unscaledValue.throwIfExceedsTenToThirtyEight();
   }
@@ -1125,6 +1163,7 @@ public final class Decimal128 extends Nu
     if (this.signum == 0 || right.signum == 0) {
       this.zeroClear();
       this.scale = newScale;
+      this.actualScale = newScale;
       return;
     }
 
@@ -1154,6 +1193,7 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
+    this.actualScale = newScale;
     this.signum = (byte) (this.signum * right.signum);
     if (this.unscaledValue.isZero()) {
       this.signum = 0; // because of scaling down, this could happen
@@ -1244,6 +1284,7 @@ public final class Decimal128 extends Nu
     }
     if (this.signum == 0) {
       this.scale = newScale;
+      this.actualScale = newScale;
       remainder.update(this);
       return;
     }
@@ -1271,6 +1312,7 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
+    this.actualScale = newScale;
     this.signum = (byte) (this.unscaledValue.isZero() ? 0
         : (this.signum * right.signum));
     remainder.scale = scale;
@@ -1731,17 +1773,13 @@ public final class Decimal128 extends Nu
   private int [] tmpArray = new int[2];
 
   /**
-   * Returns the string representation of this value. It discards the trailing zeros
-   * in the fractional part to match the HiveDecimal's string representation. However,
+   * Returns the string representation of this value. The fractional part uses the
+   * original {@code actualScale} captured when this value was created. However,
    * don't use this string representation for the reconstruction of the object.
    *
    * @return string representation of this value
    */
   public String getHiveDecimalString() {
-    if (this.signum == 0) {
-      return "0";
-    }
-
     StringBuilder buf = new StringBuilder(50);
     if (this.signum < 0) {
       buf.append('-');
@@ -1752,32 +1790,40 @@ public final class Decimal128 extends Nu
     int trailingZeros = tmpArray[1];
     int numIntegerDigits = unscaledLength - this.scale;
     if (numIntegerDigits > 0) {
-
       // write out integer part first
       // then write out fractional part
       for (int i=0; i < numIntegerDigits; i++) {
         buf.append(unscaled[i]);
       }
 
-      if (this.scale > trailingZeros) {
+      if (this.actualScale > 0) {
         buf.append('.');
-        for (int i = numIntegerDigits; i < (unscaledLength - trailingZeros); i++) {
+
+        if (trailingZeros > this.actualScale) {
+          for (int i=0; i < (trailingZeros - this.scale); i++) {
+            buf.append("0");
+          }
+        }
+
+        for (int i = numIntegerDigits; i < (numIntegerDigits + this.actualScale); i++) {
           buf.append(unscaled[i]);
         }
       }
     } else {
-
       // no integer part
       buf.append('0');
 
-      if (this.scale > trailingZeros) {
-
+      if (this.actualScale > 0) {
         // fractional part has, starting with zeros
         buf.append('.');
-        for (int i = unscaledLength; i < this.scale; ++i) {
-          buf.append('0');
+
+        if (this.actualScale > trailingZeros) {
+          for (int i = unscaledLength; i < this.scale; ++i) {
+            buf.append('0');
+          }
         }
-        for (int i = 0; i < (unscaledLength - trailingZeros); i++) {
+
+        for (int i = 0; i < (numIntegerDigits + this.actualScale); i++) {
           buf.append(unscaled[i]);
         }
       }
@@ -1836,9 +1882,10 @@ public final class Decimal128 extends Nu
 
   @Override
   public String toString() {
-    return toFormalString() + "(Decimal128: scale=" + scale + ", signum="
-        + signum + ", BigDecimal.toString=" + toBigDecimal().toString()
-        + ", unscaledValue=[" + unscaledValue.toString() + "])";
+    return toFormalString() + "(Decimal128: scale=" + scale + ", actualScale="
+        + this.actualScale + ", signum=" + signum + ", BigDecimal.toString="
+        + toBigDecimal().toString() + ", unscaledValue=[" + unscaledValue.toString()
+        + "])";
   }
 
   /**
@@ -1956,6 +2003,7 @@ public final class Decimal128 extends Nu
    */
   public Decimal128 fastUpdateFromInternalStorage(byte[] internalStorage, short scale) {
     this.scale = scale;
+    this.actualScale = scale;
     this.signum = this.unscaledValue.fastUpdateFromInternalStorage(internalStorage);
 
     return this;

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Fri Aug 22 21:36:47 2014
@@ -30,7 +30,6 @@ import java.math.RoundingMode;
 public class HiveDecimal implements Comparable<HiveDecimal> {
   public static final int MAX_PRECISION = 38;
   public static final int MAX_SCALE = 38;
-
   /**
    * Default precision/scale when user doesn't specify in the column metadata, such as
    * decimal and decimal(8).
@@ -113,7 +112,7 @@ public class HiveDecimal implements Comp
 
   @Override
   public int hashCode() {
-    return bd.hashCode();
+    return trim(bd).hashCode();
   }
 
   @Override
@@ -169,7 +168,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal multiply(HiveDecimal dec) {
-    return create(bd.multiply(dec.bd), false);
+    return create(bd.multiply(dec.bd), true);
   }
 
   public BigInteger unscaledValue() {
@@ -202,7 +201,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal divide(HiveDecimal dec) {
-    return create(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP), true);
+    return create(trim(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP)), true);
   }
 
   /**
@@ -232,8 +231,6 @@ public class HiveDecimal implements Comp
       return null;
     }
 
-    bd = trim(bd);
-
     int intDigits = bd.precision() - bd.scale();
 
     if (intDigits > MAX_PRECISION) {
@@ -244,8 +241,6 @@ public class HiveDecimal implements Comp
     if (bd.scale() > maxScale ) {
       if (allowRounding) {
         bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
-        // Trimming is again necessary, because rounding may introduce new trailing 0's.
-        bd = trim(bd);
       } else {
         bd = null;
       }
@@ -259,8 +254,6 @@ public class HiveDecimal implements Comp
       return null;
     }
 
-    bd = trim(bd);
-
     int maxIntDigits = maxPrecision - maxScale;
     int intDigits = bd.precision() - bd.scale();
     if (intDigits > maxIntDigits) {

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Aug 22 21:36:47 2014
@@ -36,12 +36,14 @@ import java.util.regex.Pattern;
 
 import javax.security.auth.login.LoginException;
 
-import static org.apache.hadoop.hive.conf.Validator.*;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.conf.Validator.PatternSet;
+import org.apache.hadoop.hive.conf.Validator.RangeValidator;
+import org.apache.hadoop.hive.conf.Validator.StringSet;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -68,6 +70,7 @@ public class HiveConf extends Configurat
 
 
   private static final Map<String, ConfVars> vars = new HashMap<String, ConfVars>();
+  private static final Map<String, ConfVars> metaConfs = new HashMap<String, ConfVars>();
   private final List<String> restrictList = new ArrayList<String>();
 
   private boolean isWhiteListRestrictionEnabled = false;
@@ -153,6 +156,19 @@ public class HiveConf extends Configurat
       HiveConf.ConfVars.HIVE_TXN_MAX_OPEN_BATCH,
       };
 
+  /**
+   * User configurable Metastore vars
+   */
+  public static final HiveConf.ConfVars[] metaConfVars = {
+      HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL,
+      HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL_DDL
+  };
+
+  static {
+    for (ConfVars confVar : metaConfVars) {
+      metaConfs.put(confVar.varname, confVar);
+    }
+  }
 
   /**
    * dbVars are the parameters can be set per database. If these
@@ -282,9 +298,9 @@ public class HiveConf extends Configurat
 
     LOCALMODEAUTO("hive.exec.mode.local.auto", false,
         "Let Hive determine whether to run in local mode automatically"),
-    LOCALMODEMAXBYTES("hive.exec.mode.local.auto.inputbytes.max", 134217728L, 
+    LOCALMODEMAXBYTES("hive.exec.mode.local.auto.inputbytes.max", 134217728L,
         "When hive.exec.mode.local.auto is true, input bytes should less than this for local mode."),
-    LOCALMODEMAXINPUTFILES("hive.exec.mode.local.auto.input.files.max", 4, 
+    LOCALMODEMAXINPUTFILES("hive.exec.mode.local.auto.input.files.max", 4,
         "When hive.exec.mode.local.auto is true, the number of tasks should less than this for local mode."),
 
     DROPIGNORESNONEXISTENT("hive.exec.drop.ignorenonexistent", true,
@@ -355,7 +371,7 @@ public class HiveConf extends Configurat
         "The number of times to retry a HMSHandler call if there were a connection error"),
     HMSHANDLERINTERVAL("hive.hmshandler.retry.interval", 1000,
         "The number of milliseconds between HMSHandler retry attempts"),
-    HMSHANDLERFORCERELOADCONF("hive.hmshandler.force.reload.conf", false, 
+    HMSHANDLERFORCERELOADCONF("hive.hmshandler.force.reload.conf", false,
         "Whether to force reloading of the HMSHandler configuration (including\n" +
         "the connection URL, before the next metastore query that accesses the\n" +
         "datastore. Once reloaded, this value is reset to false. Used for\n" +
@@ -368,7 +384,7 @@ public class HiveConf extends Configurat
         "Whether to enable TCP keepalive for the metastore server. Keepalive will prevent accumulation of half-open connections."),
 
     METASTORE_INT_ORIGINAL("hive.metastore.archive.intermediate.original",
-        "_INTERMEDIATE_ORIGINAL", 
+        "_INTERMEDIATE_ORIGINAL",
         "Intermediate dir suffixes used for archiving. Not important what they\n" +
         "are, as long as collisions are avoided"),
     METASTORE_INT_ARCHIVED("hive.metastore.archive.intermediate.archived",
@@ -544,7 +560,7 @@ public class HiveConf extends Configurat
     HIVE_SESSION_HISTORY_ENABLED("hive.session.history.enabled", false,
         "Whether to log Hive query, query plan, runtime statistics etc."),
 
-    HIVEQUERYSTRING("hive.query.string", "", 
+    HIVEQUERYSTRING("hive.query.string", "",
         "Query being executed (might be multiple per a session)"),
 
     HIVEQUERYID("hive.query.id", "",
@@ -786,7 +802,7 @@ public class HiveConf extends Configurat
         " for small ORC files. Note that enabling this config will not honor padding tolerance\n" +
         " config (hive.exec.orc.block.padding.tolerance)."),
     HIVEMERGEINPUTFORMATSTRIPELEVEL("hive.merge.input.format.stripe.level",
-        "org.apache.hadoop.hive.ql.io.orc.OrcFileStripeMergeInputFormat", 
+        "org.apache.hadoop.hive.ql.io.orc.OrcFileStripeMergeInputFormat",
 	"Input file format to use for ORC stripe level merging (for internal use only)"),
     HIVEMERGECURRENTJOBHASDYNAMICPARTITIONS(
         "hive.merge.current.job.has.dynamic.partitions", false, ""),
@@ -802,7 +818,7 @@ public class HiveConf extends Configurat
     HIVE_RCFILE_TOLERATE_CORRUPTIONS("hive.io.rcfile.tolerate.corruptions", false, ""),
     HIVE_RCFILE_RECORD_BUFFER_SIZE("hive.io.rcfile.record.buffer.size", 4194304, ""),   // 4M
 
-    HIVE_ORC_FILE_MEMORY_POOL("hive.exec.orc.memory.pool", 0.5f, 
+    HIVE_ORC_FILE_MEMORY_POOL("hive.exec.orc.memory.pool", 0.5f,
         "Maximum fraction of heap that can be used by ORC file writers"),
     HIVE_ORC_WRITE_FORMAT("hive.exec.orc.write.format", null,
         "Define the version of the file to write"),
@@ -1088,8 +1104,8 @@ public class HiveConf extends Configurat
         "The Java class (implementing the StatsAggregator interface) that is used by default if hive.stats.dbclass is custom type."),
     HIVE_STATS_JDBC_TIMEOUT("hive.stats.jdbc.timeout", 30,
         "Timeout value (number of seconds) used by JDBC connection and statements."),
-    HIVE_STATS_ATOMIC("hive.stats.atomic", false, 
-        "whether to update metastore stats only if all stats are available"), 
+    HIVE_STATS_ATOMIC("hive.stats.atomic", false,
+        "whether to update metastore stats only if all stats are available"),
     HIVE_STATS_RETRIES_MAX("hive.stats.retries.max", 0,
         "Maximum number of retries when stats publisher/aggregator got an exception updating intermediate database. \n" +
         "Default is no tries on failures."),
@@ -1317,6 +1333,8 @@ public class HiveConf extends Configurat
         "Enables type checking for registered Hive configurations"),
 
     SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook", "", ""),
+    HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE(
+        "hive.test.authz.sstd.hs2.mode", false, "test hs2 mode from .q tests", true),
     HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false,
         "enable or disable the Hive client authorization"),
     HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
@@ -1461,6 +1479,8 @@ public class HiveConf extends Configurat
         "Minimum number of worker threads when in HTTP mode."),
     HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads", 500,
         "Maximum number of worker threads when in HTTP mode."),
+    HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time", 1800000, 
+        "Maximum idle time in milliseconds for a connection on the server when in HTTP mode."),
 
     // binary transport settings
     HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000,
@@ -1648,7 +1668,7 @@ public class HiveConf extends Configurat
         "Exceeding this will trigger a flush irrelevant of memory pressure condition."),
     HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1,
         "Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded."),
-    
+
 
     HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, ""),
 
@@ -2035,6 +2055,10 @@ public class HiveConf extends Configurat
     return vars.get(name);
   }
 
+  public static ConfVars getMetaConf(String name) {
+    return metaConfs.get(name);
+  }
+
   public String getVar(ConfVars var) {
     return getVar(this, var);
   }

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java Fri Aug 22 21:36:47 2014
@@ -33,6 +33,7 @@ public class SystemVariables {
   public static final String SYSTEM_PREFIX = "system:";
   public static final String HIVECONF_PREFIX = "hiveconf:";
   public static final String HIVEVAR_PREFIX = "hivevar:";
+  public static final String METACONF_PREFIX = "metaconf:";
   public static final String SET_COLUMN_NAME = "set";
 
   protected String getSubstitute(Configuration conf, String var) {

Modified: hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java (original)
+++ hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java Fri Aug 22 21:36:47 2014
@@ -811,7 +811,7 @@ public class TestDecimal128 {
     assertEquals("0.00923076923", d2.getHiveDecimalString());
 
     Decimal128 d3 = new Decimal128("0.00923076000", (short) 15);
-    assertEquals("0.00923076", d3.getHiveDecimalString());
+    assertEquals("0.00923076000", d3.getHiveDecimalString());
 
     Decimal128 d4 = new Decimal128("4294967296.01", (short) 15);
     assertEquals("4294967296.01", d4.getHiveDecimalString());
@@ -849,15 +849,37 @@ public class TestDecimal128 {
     d11.update(hd6.bigDecimalValue());
     assertEquals(hd6.toString(), d11.getHiveDecimalString());
 
+    // The trailing zeros from a double value are trimmed automatically
+    // by the double data type
     Decimal128 d12 = new Decimal128(27.000, (short)3);
-    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000"));
+    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.0"));
     assertEquals(hd7.toString(), d12.getHiveDecimalString());
-    assertEquals("27", d12.getHiveDecimalString());
+    assertEquals("27.0", d12.getHiveDecimalString());
 
     Decimal128 d13 = new Decimal128(1234123000, (short)3);
     HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
     assertEquals(hd8.toString(), d13.getHiveDecimalString());
     assertEquals("1234123000", d13.getHiveDecimalString());
+
+    Decimal128 d14 = new Decimal128(1.33e4, (short)10);
+    HiveDecimal hd9 = HiveDecimal.create(new BigDecimal("1.33e4"));
+    assertEquals(hd9.toString(), d14.getHiveDecimalString());
+    assertEquals("13300", d14.getHiveDecimalString());
+
+    Decimal128 d15 = new Decimal128(1.33e-4, (short)10);
+    HiveDecimal hd10 = HiveDecimal.create(new BigDecimal("1.33e-4"));
+    assertEquals(hd10.toString(), d15.getHiveDecimalString());
+    assertEquals("0.000133", d15.getHiveDecimalString());
+
+    Decimal128 d16 = new Decimal128("1.33e4", (short)10);
+    HiveDecimal hd11 = HiveDecimal.create(new BigDecimal("1.33e4"));
+    assertEquals(hd11.toString(), d16.getHiveDecimalString());
+    assertEquals("13300", d16.getHiveDecimalString());
+
+    Decimal128 d17 = new Decimal128("1.33e-4", (short)10);
+    HiveDecimal hd12 = HiveDecimal.create(new BigDecimal("1.33e-4"));
+    assertEquals(hd12.toString(), d17.getHiveDecimalString());
+    assertEquals("0.000133", d17.getHiveDecimalString());
   }
 
   @Test

Modified: hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/branches/cbo/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Fri Aug 22 21:36:47 2014
@@ -50,25 +50,35 @@ public class TestHiveDecimal {
     Assert.assertEquals("-1786135888657847525803324040144343378.1", dec.toString());
 
     dec = HiveDecimal.create("005.34000");
-    Assert.assertEquals(dec.precision(), 3);
-    Assert.assertEquals(dec.scale(), 2);
+    Assert.assertEquals(dec.precision(), 6);
+    Assert.assertEquals(dec.scale(), 5);
 
     dec = HiveDecimal.create("178613588865784752580332404014434337809799306448796128931113691624");
     Assert.assertNull(dec);
-  }
 
-  @Test
-  public void testTrailingZeroRemovalAfterEnforcement() {
-    String decStr = "8.090000000000000000000000000000000000000123456";
-    HiveDecimal dec = HiveDecimal.create(decStr);
-    Assert.assertEquals("8.09", dec.toString());
+    // Leaving trailing zeros
+    Assert.assertEquals("0.0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0"), 2, 1).toString());
+    Assert.assertEquals("0.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.00"), 3, 2).toString());
+    Assert.assertEquals("0.0000", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0000"), 10, 4).toString());
+    Assert.assertEquals("100.00000", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00000"), 15, 5).toString());
+    Assert.assertEquals("100.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00"), 15, 5).toString());
+
+    // Rounding numbers
+    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.012"), 3, 2).toString());
+    Assert.assertEquals("0.02", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.015"), 3, 2).toString());
+    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0145"), 3, 2).toString());
+
+    // Integers with no scale values are not modified (zeros are not null)
+    Assert.assertEquals("0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0"), 1, 0).toString());
+    Assert.assertEquals("30", HiveDecimal.enforcePrecisionScale(new BigDecimal("30"), 2, 0).toString());
+    Assert.assertEquals("5", HiveDecimal.enforcePrecisionScale(new BigDecimal("5"), 3, 2).toString());
   }
 
   @Test
   public void testMultiply() {
     HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
     HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
-    Assert.assertNull(dec1.multiply(dec2));
+    Assert.assertNotNull(dec1.multiply(dec2));
 
     dec1 = HiveDecimal.create("178613588865784752580323232232323444.4");
     dec2 = HiveDecimal.create("178613588865784752580302323232.3");
@@ -77,6 +87,14 @@ public class TestHiveDecimal {
     dec1 = HiveDecimal.create("47.324");
     dec2 = HiveDecimal.create("9232.309");
     Assert.assertEquals("436909.791116", dec1.multiply(dec2).toString());
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("3.14000", dec1.multiply(dec2).toString());
+
+    dec1 = HiveDecimal.create("43.010");
+    dec2 = HiveDecimal.create("2");
+    Assert.assertEquals("86.020", dec1.multiply(dec2).toString());
   }
 
   @Test
@@ -87,6 +105,9 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("0.000017861358882");
     dec1 = dec1.pow(3);
     Assert.assertNull(dec1);
+
+    dec1 = HiveDecimal.create("3.140");
+    Assert.assertEquals("9.859600", dec1.pow(2).toString());
   }
 
   @Test
@@ -94,6 +115,14 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("3.14");
     HiveDecimal dec2 = HiveDecimal.create("3");
     Assert.assertNotNull(dec1.divide(dec2));
+
+    dec1 = HiveDecimal.create("15");
+    dec2 = HiveDecimal.create("5");
+    Assert.assertEquals("3", dec1.divide(dec2).toString());
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("3.14", dec1.divide(dec2).toString());
   }
 
   @Test
@@ -101,6 +130,18 @@ public class TestHiveDecimal {
     HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
     HiveDecimal dec2 = HiveDecimal.create("1");
     Assert.assertNotNull(dec1.add(dec2));
+
+    dec1 = HiveDecimal.create("3.140");
+    dec2 = HiveDecimal.create("1.00");
+    Assert.assertEquals("4.140", dec1.add(dec2).toString());
+  }
+
+
+  @Test
+  public void testSubtract() {
+      HiveDecimal dec1 = HiveDecimal.create("3.140");
+      HiveDecimal dec2 = HiveDecimal.create("1.00");
+      Assert.assertEquals("2.140", dec1.subtract(dec2).toString());
   }
 
   @Test
@@ -112,6 +153,12 @@ public class TestHiveDecimal {
   }
 
   @Test
+  public void testHashCode() {
+      Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
+      Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
+  }
+
+  @Test
   public void testException() {
     HiveDecimal dec = HiveDecimal.create("3.1415.926");
     Assert.assertNull(dec);
@@ -121,7 +168,7 @@ public class TestHiveDecimal {
 
   @Test
   public void testBinaryConversion() {
-    testBinaryConversion("0.0");
+    testBinaryConversion("0.00");
     testBinaryConversion("-12.25");
     testBinaryConversion("234.79");
   }

Modified: hive/branches/cbo/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientnegative/serde_regex.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientnegative/serde_regex.q.out Fri Aug 22 21:36:47 2014
@@ -1,7 +1,9 @@
 PREHOOK: query: USE default
 PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
 POSTHOOK: query: USE default
 POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
 PREHOOK: query: --  This should fail because Regex SerDe supports only columns of type string
 EXPLAIN
 CREATE TABLE serde_regex(
@@ -74,4 +76,5 @@ WITH SERDEPROPERTIES (
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@serde_regex
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.contrib.serde2.RegexSerDe only accepts string columns, but column[5] named status has type int)

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/fileformat_base64.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/fileformat_base64.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/fileformat_base64.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/fileformat_base64.q.out Fri Aug 22 21:36:47 2014
@@ -29,6 +29,7 @@ PREHOOK: query: CREATE TABLE base64_test
   OUTPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@base64_test
 POSTHOOK: query: CREATE TABLE base64_test(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat'

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_regex.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_regex.q.out Fri Aug 22 21:36:47 2014
@@ -68,6 +68,7 @@ WITH SERDEPROPERTIES (
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@serde_regex
 POSTHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_s3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_s3.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_s3.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_s3.q.out Fri Aug 22 21:36:47 2014
@@ -7,6 +7,7 @@ ROW FORMAT SERDE 'org.apache.hadoop.hive
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@s3log
 POSTHOOK: query: CREATE TABLE s3log
 ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.s3.S3LogDeserializer'
 STORED AS TEXTFILE

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
 POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest1(key SMALLINT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
 POSTHOOK: query: CREATE TABLE dest1(key SMALLINT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
 POSTHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
 POSTHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
 POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes6.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes6.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes6.q.out Fri Aug 22 21:36:47 2014
@@ -13,6 +13,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
 POSTHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -20,6 +21,7 @@ POSTHOOK: Output: default@table1
 PREHOOK: query: CREATE TABLE table2 (a STRING, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
 POSTHOOK: query: CREATE TABLE table2 (a STRING, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -27,6 +29,7 @@ POSTHOOK: Output: default@table2
 PREHOOK: query: CREATE TABLE table3 (a STRING, b STRING, c STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@table3
 POSTHOOK: query: CREATE TABLE table3 (a STRING, b STRING, c STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientpositive/serde_typedbytes_null.q.out Fri Aug 22 21:36:47 2014
@@ -5,6 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' STORED AS SEQUENCEFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
 POSTHOOK: query: CREATE TABLE table1 (a STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe' STORED AS SEQUENCEFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/data/files/parquet_types.txt
URL: http://svn.apache.org/viewvc/hive/branches/cbo/data/files/parquet_types.txt?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/data/files/parquet_types.txt (original)
+++ hive/branches/cbo/data/files/parquet_types.txt Fri Aug 22 21:36:47 2014
@@ -1,21 +1,21 @@
-100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111
-101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222
-102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333
-103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444
-104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555
-105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666
-106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777
-107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888
-108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999
-109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101
-110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111
-111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121
-112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131
-113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141
-114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151
-115|1|1|1.0|4.5|tuv|2026-04-04 16:16:16.161616161
-116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171
-117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181
-118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191
-119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202
-120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212
+100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  
+101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab  
+102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc
+103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd
+104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde
+105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef
+106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg
+107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh
+108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|abcdefghijklmnop
+109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef
+110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede
+111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded
+112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd
+113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc
+114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b
+115|1|1|1.0|4.5|tuv|2026-04-04 16:16:16.161616161|rstuv|abcded
+116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded
+117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded
+118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede
+119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede
+120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde

Modified: hive/branches/cbo/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out Fri Aug 22 21:36:47 2014
@@ -1,7 +1,9 @@
 PREHOOK: query: CREATE DATABASE hbaseDB
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:hbaseDB
 POSTHOOK: query: CREATE DATABASE hbaseDB
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:hbaseDB
 PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- Hadoop 0.23 changes the behavior FsShell on Exit Codes
 -- In Hadoop 0.20
@@ -18,6 +20,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:hbasedb
+PREHOOK: Output: hbaseDB@hbaseDB.hbase_table_0
 POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- Hadoop 0.23 changes the behavior FsShell on Exit Codes
 -- In Hadoop 0.20
@@ -34,6 +37,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:hbasedb
+POSTHOOK: Output: hbaseDB@hbaseDB.hbase_table_0
 POSTHOOK: Output: hbaseDB@hbase_table_0
 Found 3 items
 #### A masked pattern was here ####

Modified: hive/branches/cbo/hbase-handler/src/test/results/negative/generatehfiles_require_family_path.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/negative/generatehfiles_require_family_path.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/negative/generatehfiles_require_family_path.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/negative/generatehfiles_require_family_path.q.out Fri Aug 22 21:36:47 2014
@@ -11,6 +11,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:string')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_bulk
 POSTHOOK: query: CREATE TABLE hbase_bulk (key INT, value STRING)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:string')

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/external_table_ppd.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/external_table_ppd.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/external_table_ppd.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/external_table_ppd.q.out Fri Aug 22 21:36:47 2014
@@ -16,6 +16,7 @@ TBLPROPERTIES ("hbase.table.name" = "t_h
                "hbase.table.default.storage.type" = "binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase
 POSTHOOK: query: CREATE TABLE t_hbase(key STRING,
                      tinyint_col TINYINT,
                      smallint_col SMALLINT,

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_external_table_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_external_table_queries.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_external_table_queries.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_external_table_queries.q.out Fri Aug 22 21:36:47 2014
@@ -10,6 +10,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "HiveExternalTable")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_1
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_1
 (key STRING, c_bool BOOLEAN, c_byte TINYINT, c_short SMALLINT,
  c_int INT, c_long BIGINT, c_string STRING, c_float FLOAT, c_double DOUBLE)
@@ -50,6 +51,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "HiveExternalTable")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_2
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_2
 (key STRING, c_bool BOOLEAN, c_byte TINYINT, c_short SMALLINT,
  c_int INT, c_long BIGINT, c_string STRING, c_float FLOAT, c_double DOUBLE)
@@ -92,6 +94,7 @@ TBLPROPERTIES (
 "hbase.table.default.storage.type" = "binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_3
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_3
 (key STRING, c_bool BOOLEAN, c_byte TINYINT, c_short SMALLINT,
  c_int INT, c_long BIGINT, c_string STRING, c_float FLOAT, c_double DOUBLE)

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries.q.out Fri Aug 22 21:36:47 2014
@@ -12,6 +12,7 @@ PREHOOK: query: CREATE TABLE hbase_src(k
                        string_col STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_src
 POSTHOOK: query: CREATE TABLE hbase_src(key STRING,
                        tinyint_col TINYINT,
                        smallint_col SMALLINT,
@@ -63,6 +64,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_maps
 POSTHOOK: query: CREATE TABLE t_hbase_maps(key STRING,
                           tinyint_map_col MAP<TINYINT, TINYINT>,
                           smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -166,6 +168,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps(key STRING,
                                        tinyint_map_col MAP<TINYINT, TINYINT>,
                                        smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -217,6 +220,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps_1
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps_1(key STRING,
                                          tinyint_map_col MAP<TINYINT, TINYINT>,
                                          smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -270,6 +274,7 @@ TBLPROPERTIES (
 "hbase.table.default.storage.type"="binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps_2
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps_2(key STRING,
                                          tinyint_map_col MAP<TINYINT, TINYINT>,
                                          smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -323,6 +328,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps_1")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_maps_1
 POSTHOOK: query: CREATE TABLE t_hbase_maps_1(key STRING,
                             tinyint_map_col MAP<TINYINT, TINYINT>,
                             smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -426,6 +432,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps_1")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps_3
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps_3(key STRING,
                                          tinyint_map_col MAP<TINYINT, TINYINT>,
                                          smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -477,6 +484,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps_1")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps_4
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps_4(key STRING,
                                          tinyint_map_col MAP<TINYINT, TINYINT>,
                                          smallint_map_col MAP<SMALLINT, SMALLINT>,
@@ -530,6 +538,7 @@ TBLPROPERTIES (
 "hbase.table.default.storage.type"="binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps_5
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps_5(key STRING,
                                          tinyint_map_col MAP<TINYINT, TINYINT>,
                                          smallint_map_col MAP<SMALLINT, SMALLINT>,

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries_prefix.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries_prefix.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries_prefix.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_map_queries_prefix.q.out Fri Aug 22 21:36:47 2014
@@ -12,6 +12,7 @@ PREHOOK: query: CREATE TABLE hbase_src(k
                        string_col STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_src
 POSTHOOK: query: CREATE TABLE hbase_src(key STRING,
                        tinyint_col TINYINT,
                        smallint_col SMALLINT,
@@ -57,6 +58,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_maps
 POSTHOOK: query: CREATE TABLE t_hbase_maps(key STRING,
                           string_map_col MAP<STRING, STRING>,
                           simple_string_col STRING)
@@ -123,6 +125,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name"="t_hive_maps")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_ext_hbase_maps
 POSTHOOK: query: CREATE EXTERNAL TABLE t_ext_hbase_maps(key STRING,
                                        string_map_cols MAP<STRING, STRING>, simple_string_col STRING)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_storage_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_storage_queries.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_storage_queries.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_binary_storage_queries.q.out Fri Aug 22 21:36:47 2014
@@ -16,6 +16,7 @@ TBLPROPERTIES ("hbase.table.name" = "t_h
                "hbase.table.default.storage.type" = "binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase
 POSTHOOK: query: CREATE TABLE t_hbase(key STRING,
                      tinyint_col TINYINT,
                      smallint_col SMALLINT,
@@ -192,6 +193,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "t_hive")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_1
 POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_1(key STRING,
                                 tinyint_col TINYINT,
                                 smallint_col SMALLINT,
@@ -341,6 +343,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_2
 POSTHOOK: query: CREATE TABLE t_hbase_2(key STRING,
                      tinyint_col TINYINT,
                      smallint_col SMALLINT,
@@ -475,6 +478,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "t_hive_2")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_3
 POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_3(key STRING,
                                 tinyint_col TINYINT,
                                 smallint_col SMALLINT,
@@ -577,6 +581,7 @@ TBLPROPERTIES (
 "hbase.table.default.storage.type" = "binary")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t_hbase_4
 POSTHOOK: query: CREATE EXTERNAL TABLE t_hbase_4(key STRING,
                      tinyint_col TINYINT,
                      smallint_col SMALLINT,

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_bulk.m.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_bulk.m.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_bulk.m.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_bulk.m.out Fri Aug 22 21:36:47 2014
@@ -15,6 +15,7 @@ OUTPUTFORMAT 'org.apache.hadoop.hive.hba
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbsort
 POSTHOOK: query: -- this is a dummy table used for controlling how the HFiles are
 -- created
 create table hbsort(key string, val string, val2 string)
@@ -39,6 +40,7 @@ outputformat
 PREHOOK: type: CREATETABLE
 #### A masked pattern was here ####
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbpartition
 POSTHOOK: query: -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created
 create table hbpartition(part_break string)

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key.q.out Fri Aug 22 21:36:47 2014
@@ -6,6 +6,7 @@ WITH SERDEPROPERTIES (
     "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ck_1
 POSTHOOK: query: CREATE TABLE hbase_ck_1(key struct<col1:string,col2:string,col3:string>, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -22,6 +23,7 @@ WITH SERDEPROPERTIES (
     "hbase.columns.mapping" = ":key,cf:string")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ck_2
 POSTHOOK: query: CREATE EXTERNAL TABLE hbase_ck_2(key string, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out Fri Aug 22 21:36:47 2014
@@ -6,6 +6,7 @@ WITH SERDEPROPERTIES (
     "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory2")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ck_4
 POSTHOOK: query: CREATE TABLE hbase_ck_4(key struct<col1:string,col2:string,col3:string>, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out Fri Aug 22 21:36:47 2014
@@ -6,6 +6,7 @@ WITH SERDEPROPERTIES (
     "hbase.composite.key.factory"="org.apache.hadoop.hive.hbase.TestHBaseKeyFactory3")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ck_5
 POSTHOOK: query: CREATE TABLE hbase_ck_5(key struct<col1:string,col2:string,col3:string>, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_handler_bulk.q.out Fri Aug 22 21:36:47 2014
@@ -13,6 +13,7 @@ with serdeproperties ('hbase.columns.map
 tblproperties ('hbase.table.name' = 'positive_hbase_handler_bulk')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hb_target
 POSTHOOK: query: -- this is the target HBase table
 create table hb_target(key int, val string)
 stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_joins.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_joins.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_joins.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_joins.q.out Fri Aug 22 21:36:47 2014
@@ -23,6 +23,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@users
 POSTHOOK: query: -- From HIVE-1257
 
 CREATE TABLE users(key string, state string, country string, country_id int)
@@ -40,6 +41,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@states
 POSTHOOK: query: CREATE TABLE states(key string, name string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -55,6 +57,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@countries
 POSTHOOK: query: CREATE TABLE countries(key string, name string, country string, country_id int)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -224,6 +227,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:nickname,f:created")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@users
 POSTHOOK: query: CREATE TABLE users(key int, userid int, username string, created int) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:nickname,f:created")
@@ -235,6 +239,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:level")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@users_level
 POSTHOOK: query: CREATE TABLE users_level(key int, userid int, level int)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:level")

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out Fri Aug 22 21:36:47 2014
@@ -3,6 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_pushdown
 POSTHOOK: query: CREATE TABLE hbase_pushdown(key string, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_pushdown.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_pushdown.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_pushdown.q.out Fri Aug 22 21:36:47 2014
@@ -3,6 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_pushdown
 POSTHOOK: query: CREATE TABLE hbase_pushdown(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_queries.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_queries.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_queries.q.out Fri Aug 22 21:36:47 2014
@@ -8,6 +8,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_1
 POSTHOOK: query: CREATE TABLE hbase_table_1(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
@@ -81,6 +82,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_2
 POSTHOOK: query: CREATE EXTERNAL TABLE hbase_table_2(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
@@ -378,6 +380,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@empty_hbase_table
 POSTHOOK: query: CREATE TABLE empty_hbase_table(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
@@ -391,6 +394,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE empty_normal_table(key int, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@empty_normal_table
 POSTHOOK: query: CREATE TABLE empty_normal_table(key int, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -450,6 +454,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_3
 POSTHOOK: query: CREATE TABLE hbase_table_3(key int, value string, count int) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -640,6 +645,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_4
 POSTHOOK: query: CREATE TABLE hbase_table_4(key int, value1 string, value2 int, value3 int) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -678,6 +684,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_4")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_5
 POSTHOOK: query: CREATE EXTERNAL TABLE hbase_table_5(key int, value map<string,string>) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = "a:")
@@ -706,6 +713,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_6
 POSTHOOK: query: CREATE TABLE hbase_table_6(key int, value map<string,string>) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -745,6 +753,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_7
 POSTHOOK: query: CREATE TABLE hbase_table_7(value map<string,string>, key int) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (
@@ -786,6 +795,7 @@ WITH SERDEPROPERTIES (
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_table_8
 POSTHOOK: query: CREATE TABLE hbase_table_8(key int, value1 string, value2 int, value3 int) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES (

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_scan_params.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_scan_params.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_scan_params.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_scan_params.q.out Fri Aug 22 21:36:47 2014
@@ -4,6 +4,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 "hbase.scan.cache" = "500", "hbase.scan.cacheblocks" = "true", "hbase.scan.batch" = "1")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_pushdown
 POSTHOOK: query: CREATE TABLE hbase_pushdown(key int, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string",

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_single_sourced_multi_insert.q.out Fri Aug 22 21:36:47 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- HIVE-4375 Single sour
 CREATE TABLE src_x1(key string, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@src_x1
 POSTHOOK: query: -- HIVE-4375 Single sourced multi insert consists of native and non-native table mixed throws NPE
 CREATE TABLE src_x1(key string, value string)
 POSTHOOK: type: CREATETABLE
@@ -12,6 +13,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:value")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@src_x2
 POSTHOOK: query: CREATE TABLE src_x2(key string, value string)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:value")

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats.q.out Fri Aug 22 21:36:47 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table stats_src like src
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_src
 POSTHOOK: query: create table stats_src like src
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -62,6 +63,7 @@ Storage Desc Params:	 	 
 PREHOOK: query: create table stats_part like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_part
 POSTHOOK: query: create table stats_part like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats2.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats2.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats2.q.out Fri Aug 22 21:36:47 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table stats_src like src
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_src
 POSTHOOK: query: create table stats_src like src
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -62,6 +63,7 @@ Storage Desc Params:	 	 
 PREHOOK: query: create table stats_part like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_part
 POSTHOOK: query: create table stats_part like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats3.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats3.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats3.q.out Fri Aug 22 21:36:47 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table stats_part like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_part
 POSTHOOK: query: create table stats_part like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_stats_empty_partition.q.out Fri Aug 22 21:36:47 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- This test verifies th
 create table tmptable(key string, value string) partitioned by (part string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tmptable
 POSTHOOK: query: -- This test verifies that writing an empty partition succeeds when
 -- hive.stats.reliable is set to true.
 

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out?rev=1619936&r1=1619935&r2=1619936&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out Fri Aug 22 21:36:47 2014
@@ -3,6 +3,7 @@ STORED BY 'org.apache.hadoop.hive.hbase.
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#binary,cf:string")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ppd_keyrange
 POSTHOOK: query: CREATE TABLE hbase_ppd_keyrange(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#binary,cf:string")