Posted to commits@hive.apache.org by pr...@apache.org on 2018/05/02 16:45:03 UTC

[7/7] hive git commit: HIVE-19211: New streaming ingest API and support for dynamic partitioning (Prasanth Jayachandran reviewed by Eugene Koifman)

HIVE-19211: New streaming ingest API and support for dynamic partitioning (Prasanth Jayachandran reviewed by Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bf8d305a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bf8d305a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bf8d305a

Branch: refs/heads/master
Commit: bf8d305a6fd435b3b510dc3f78b5f3329acbe423
Parents: 46c5580
Author: Prasanth Jayachandran <pr...@apache.org>
Authored: Wed May 2 09:41:43 2018 -0700
Committer: Prasanth Jayachandran <pr...@apache.org>
Committed: Wed May 2 09:41:43 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   10 +
 .../streaming/AbstractRecordWriter.java         |    5 +-
 .../streaming/DelimitedInputWriter.java         |    2 +
 .../hive/hcatalog/streaming/HiveEndPoint.java   |    2 +
 .../hive/hcatalog/streaming/RecordWriter.java   |    4 +
 .../hcatalog/streaming/StreamingConnection.java |    2 +
 .../hcatalog/streaming/StrictJsonWriter.java    |    2 +
 .../hcatalog/streaming/StrictRegexWriter.java   |    2 +
 .../hcatalog/streaming/TransactionBatch.java    |    3 +-
 .../streaming/mutate/HiveConfFactory.java       |    5 +-
 .../mutate/UgiMetaStoreClientFactory.java       |    2 +
 .../streaming/mutate/client/AcidTable.java      |    2 +
 .../mutate/client/AcidTableSerializer.java      |    2 +
 .../streaming/mutate/client/MutatorClient.java  |    2 +
 .../mutate/client/MutatorClientBuilder.java     |    5 +-
 .../streaming/mutate/client/Transaction.java    |    4 +
 .../streaming/mutate/client/lock/Lock.java      |    2 +
 .../mutate/client/lock/LockFailureListener.java |    5 +-
 .../hive/hcatalog/streaming/mutate/package.html |    2 +-
 .../mutate/worker/BucketIdResolver.java         |    5 +-
 .../mutate/worker/BucketIdResolverImpl.java     |    2 +
 .../streaming/mutate/worker/Mutator.java        |    2 +
 .../mutate/worker/MutatorCoordinator.java       |    2 +
 .../worker/MutatorCoordinatorBuilder.java       |    5 +-
 .../streaming/mutate/worker/MutatorFactory.java |    4 +
 .../streaming/mutate/worker/MutatorImpl.java    |    5 +-
 .../mutate/worker/PartitionHelper.java          |    5 +-
 .../mutate/worker/RecordInspector.java          |    5 +-
 .../mutate/worker/RecordInspectorImpl.java      |    2 +
 .../hive/hcatalog/streaming/package-info.java   |   19 -
 .../hive/hcatalog/streaming/TestStreaming.java  |  121 +-
 itests/hive-unit/pom.xml                        |    5 +
 .../hive/ql/txn/compactor/TestCompactor.java    |  761 ++++---
 .../hadoop/hive/metastore/HiveClientCache.java  |  536 +++++
 .../hive/metastore/HiveMetaStoreUtils.java      |   35 +
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |   24 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java    |    4 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java    |    6 +-
 .../apache/hadoop/hive/serde2/JsonSerDe.java    |  669 ++++++
 .../hive/metastore/utils/MetaStoreUtils.java    |    8 +-
 streaming/pom.xml                               |   42 +-
 .../hive/streaming/AbstractRecordWriter.java    |  460 ++--
 .../apache/hive/streaming/ConnectionError.java  |    6 +-
 .../apache/hive/streaming/ConnectionInfo.java   |   76 +
 .../hive/streaming/DelimitedInputWriter.java    |  331 ---
 .../apache/hive/streaming/HeartBeatFailure.java |   33 -
 .../org/apache/hive/streaming/HiveEndPoint.java | 1117 ---------
 .../hive/streaming/HiveStreamingConnection.java | 1039 +++++++++
 .../hive/streaming/ImpersonationFailed.java     |   25 -
 .../apache/hive/streaming/InvalidColumn.java    |   26 -
 .../apache/hive/streaming/InvalidPartition.java |   28 -
 .../org/apache/hive/streaming/InvalidTable.java |    8 +-
 .../hive/streaming/InvalidTransactionState.java |   25 +
 .../hive/streaming/InvalidTrasactionState.java  |   26 -
 .../hive/streaming/PartitionCreationFailed.java |    4 +-
 .../apache/hive/streaming/PartitionHandler.java |   33 +
 .../apache/hive/streaming/PartitionInfo.java    |   58 +
 .../hive/streaming/QueryFailedException.java    |   28 -
 .../org/apache/hive/streaming/RecordWriter.java |   41 +-
 .../hive/streaming/SerializationError.java      |    2 +-
 .../hive/streaming/StreamingConnection.java     |   59 +-
 .../hive/streaming/StreamingException.java      |    2 +-
 .../hive/streaming/StreamingIOFailure.java      |    4 +-
 .../streaming/StrictDelimitedInputWriter.java   |  110 +
 .../apache/hive/streaming/StrictJsonWriter.java |  117 +-
 .../hive/streaming/StrictRegexWriter.java       |  130 +-
 .../apache/hive/streaming/TransactionBatch.java |  125 --
 .../streaming/TransactionBatchUnAvailable.java  |   25 -
 .../apache/hive/streaming/TransactionError.java |    4 +-
 .../streaming/TestDelimitedInputWriter.java     |   73 -
 .../apache/hive/streaming/TestStreaming.java    | 2115 +++++++++++-------
 .../TestStreamingDynamicPartitioning.java       |  921 ++++++++
 72 files changed, 5799 insertions(+), 3582 deletions(-)
----------------------------------------------------------------------
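
For context, a minimal sketch of how the new org.apache.hive.streaming API introduced by this commit is expected to be used for dynamic partitioning. The builder and method names below are taken from the classes listed in the diffstat (HiveStreamingConnection, StrictDelimitedInputWriter); treat them as illustrative rather than authoritative and verify against the committed sources.

  // Sketch only: a dynamically partitioned ingest using the new API.
  // Database/table names are hypothetical.
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hive.streaming.HiveStreamingConnection;
  import org.apache.hive.streaming.StrictDelimitedInputWriter;

  public class StreamingIngestSketch {
    public static void main(String[] args) throws Exception {
      HiveConf conf = new HiveConf();

      // Writer for comma-delimited input records.
      StrictDelimitedInputWriter writer = StrictDelimitedInputWriter.newBuilder()
          .withFieldDelimiter(',')
          .build();

      // No static partition values are supplied, so partitions are resolved
      // dynamically from the partition columns carried in each record.
      HiveStreamingConnection connection = HiveStreamingConnection.newBuilder()
          .withDatabase("testing")              // hypothetical database
          .withTable("alerts")                  // hypothetical partitioned ACID table
          .withAgentInfo("example-agent")
          .withRecordWriter(writer)
          .withHiveConf(conf)
          .connect();

      connection.beginTransaction();
      connection.write("1,value1,2018-05-02".getBytes());
      connection.write("2,value2,2018-05-03".getBytes());
      connection.commitTransaction();
      connection.close();
    }
  }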


http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index fe7b23f..6358ff3 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -665,6 +665,16 @@ public class HiveConf extends Configuration {
     @Deprecated
     METASTORE_CAPABILITY_CHECK("hive.metastore.client.capability.check", true,
         "Whether to check client capabilities for potentially breaking API usage."),
+    METASTORE_CLIENT_CACHE_ENABLED("hive.metastore.client.cache.enabled", false,
+      "Whether to enable metastore client cache"),
+    METASTORE_CLIENT_CACHE_EXPIRY_TIME("hive.metastore.client.cache.expiry.time", "120s",
+      new TimeValidator(TimeUnit.SECONDS), "Expiry time for metastore client cache"),
+    METASTORE_CLIENT_CACHE_INITIAL_CAPACITY("hive.metastore.client.cache.initial.capacity", 50,
+      "Initial capacity for metastore client cache"),
+    METASTORE_CLIENT_CACHE_MAX_CAPACITY("hive.metastore.client.cache.max.capacity", 50,
+      "Max capacity for metastore client cache"),
+    METASTORE_CLIENT_CACHE_STATS_ENABLED("hive.metastore.client.cache.stats.enabled", false,
+      "Whether to enable metastore client cache stats"),
     METASTORE_FASTPATH("hive.metastore.fastpath", false,
         "Used to avoid all of the proxies and object copies in the metastore.  Note, if this is " +
             "set, you MUST use a local metastore (hive.metastore.uris must be empty) otherwise " +

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index 2a68220..5e12254 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -53,7 +53,10 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Properties;
 
-
+/**
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.AbstractRecordWriter}
+ */
+@Deprecated
 public abstract class AbstractRecordWriter implements RecordWriter {
   static final private Logger LOG = LoggerFactory.getLogger(AbstractRecordWriter.class.getName());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
index 999c37e..32dae45 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
@@ -46,7 +46,9 @@ import java.util.Properties;
  * Streaming Writer handles delimited input (eg. CSV).
  * Delimited input is parsed & reordered to match column order in table
  * Uses Lazy Simple Serde to process delimited input
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.StrictDelimitedInputWriter}
  */
+@Deprecated
 public class DelimitedInputWriter extends AbstractRecordWriter {
   private final boolean reorderingNeeded;
   private String delimiter;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 8582e9a..3ee19dd 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -60,7 +60,9 @@ import java.util.List;
  * Information about the hive end point (i.e. table or partition) to write to.
  * A light weight object that does NOT internally hold on to resources such as
  * network connections. It can be stored in Hashed containers such as sets and hash tables.
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.HiveStreamingConnection}
  */
+@Deprecated
 public class HiveEndPoint {
   public final String metaStoreUri;
   public final String database;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/RecordWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/RecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/RecordWriter.java
index a9bcd9f..19078d2 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/RecordWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/RecordWriter.java
@@ -19,6 +19,10 @@
 package org.apache.hive.hcatalog.streaming;
 
 
+/**
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.RecordWriter}
+ */
+@Deprecated
 public interface RecordWriter {
 
   /** Writes using a hive RecordUpdater

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
index c2b5157..0de8abc 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StreamingConnection.java
@@ -24,7 +24,9 @@ import org.apache.hadoop.security.UserGroupInformation;
  * Represents a connection to a HiveEndPoint. Used to acquire transaction batches.
  * Note: the expectation is that there is at most 1 TransactionBatch outstanding for any given
  * StreamingConnection.  Violating this may result in "out of sequence response".
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.HiveStreamingConnection}
  */
+@Deprecated
 public interface StreamingConnection {
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
index 4d92c55..48e7e49 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
@@ -37,7 +37,9 @@ import java.util.Properties;
 /**
  * Streaming Writer handles utf8 encoded Json (Strict syntax).
  * Uses org.apache.hive.hcatalog.data.JsonSerDe to process Json input
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.StrictJsonWriter}
  */
+@Deprecated
 public class StrictJsonWriter extends AbstractRecordWriter {
   private JsonSerDe serde;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictRegexWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictRegexWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictRegexWriter.java
index ae25662..f0540e0 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictRegexWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictRegexWriter.java
@@ -41,7 +41,9 @@ import org.apache.hadoop.io.Text;
 /**
  * Streaming Writer handles text input data with regex. Uses
  * org.apache.hadoop.hive.serde2.RegexSerDe
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.StrictRegexWriter}
  */
+@Deprecated
 public class StrictRegexWriter extends AbstractRecordWriter {
   private RegexSerDe serde;
   private final StructObjectInspector recordObjInspector;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
index 1208377..400fd49 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/TransactionBatch.java
@@ -30,8 +30,9 @@ import java.util.Collection;
  * Note on thread safety: At most 2 threads can run through a given TransactionBatch at the same
  * time.  One thread may call {@link #heartbeat()} and the other all other methods.
  * Violating this may result in "out of sequence response".
- *
+ * @deprecated as of Hive 3.0.0, replaced by {@link org.apache.hive.streaming.HiveStreamingConnection}
  */
+@Deprecated
 public interface TransactionBatch  {
   enum TxnState {
     INACTIVE("I"), OPEN("O"), COMMITTED("C"), ABORTED("A");

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/HiveConfFactory.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/HiveConfFactory.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/HiveConfFactory.java
index f23a78b..ebe032d 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/HiveConfFactory.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/HiveConfFactory.java
@@ -22,7 +22,10 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/** Creates/configures {@link HiveConf} instances with required ACID attributes. */
+/** Creates/configures {@link HiveConf} instances with required ACID attributes.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public class HiveConfFactory {
 
   private static final Logger LOG = LoggerFactory.getLogger(HiveConfFactory.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/UgiMetaStoreClientFactory.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/UgiMetaStoreClientFactory.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/UgiMetaStoreClientFactory.java
index 328b6c0..615fc1a 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/UgiMetaStoreClientFactory.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/UgiMetaStoreClientFactory.java
@@ -40,7 +40,9 @@ import com.google.common.reflect.AbstractInvocationHandler;
 /**
  * Creates a proxied {@link IMetaStoreClient client} that wraps calls in a {@link PrivilegedExceptionAction} if the
  * {@link UserGroupInformation} is specified. Invokes directly otherwise.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class UgiMetaStoreClientFactory {
 
   private static Set<Method> I_META_STORE_CLIENT_METHODS = getIMetaStoreClientMethods();

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTable.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTable.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTable.java
index 50ba0c7..40de497 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTable.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTable.java
@@ -25,7 +25,9 @@ import org.apache.hadoop.hive.metastore.api.Table;
 /**
  * Describes an ACID table that can receive mutation events. Used to encode the information required by workers to write
  * ACID events without requiring them to once more retrieve the data from the meta store db.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class AcidTable implements Serializable {
 
   private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java
index 98f9f40..43ac527 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java
@@ -37,7 +37,9 @@ import org.slf4j.LoggerFactory;
 /**
  * Utility to serialize/deserialize {@link AcidTable AcidTables} into strings so that they can be easily transported as
  * {@link Configuration} properties.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class AcidTableSerializer {
 
   private static final Logger LOG = LoggerFactory.getLogger(AcidTableSerializer.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
index 8ba6cf6..11664f6 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
@@ -39,7 +39,9 @@ import org.slf4j.LoggerFactory;
 /**
  * Responsible for orchestrating {@link Transaction Transactions} within which ACID table mutation events can occur.
  * Typically this will be a large batch of delta operations.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class MutatorClient implements Closeable {
 
   private static final Logger LOG = LoggerFactory.getLogger(MutatorClient.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClientBuilder.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClientBuilder.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClientBuilder.java
index 30a060c..1575d8d 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClientBuilder.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClientBuilder.java
@@ -32,7 +32,10 @@ import org.apache.hive.hcatalog.streaming.mutate.UgiMetaStoreClientFactory;
 import org.apache.hive.hcatalog.streaming.mutate.client.lock.Lock;
 import org.apache.hive.hcatalog.streaming.mutate.client.lock.LockFailureListener;
 
-/** Convenience class for building {@link MutatorClient} instances. */
+/** Convenience class for building {@link MutatorClient} instances.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public class MutatorClientBuilder {
 
   private final Map<String, AcidTable> tables = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/Transaction.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/Transaction.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/Transaction.java
index fa03f74..e1c6735 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/Transaction.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/Transaction.java
@@ -27,6 +27,10 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public class Transaction {
 
   private static final Logger LOG = LoggerFactory.getLogger(Transaction.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
index abbb125..52eb613 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
@@ -43,7 +43,9 @@ import org.slf4j.LoggerFactory;
 
 /**
  * Manages the state required to safely read/write from/to an ACID table.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class Lock {
 
   private static final Logger LOG = LoggerFactory.getLogger(Lock.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/LockFailureListener.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/LockFailureListener.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/LockFailureListener.java
index 55502b4..a3845ea 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/LockFailureListener.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/LockFailureListener.java
@@ -20,7 +20,10 @@ package org.apache.hive.hcatalog.streaming.mutate.client.lock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/** Provides a means to handle the situation when a held lock fails. */
+/** Provides a means to handle the situation when a held lock fails.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public interface LockFailureListener {
 
   static final Logger LOG = LoggerFactory.getLogger(LockFailureListener.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/package.html
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/package.html b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/package.html
index d133c46..7bc75c0 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/package.html
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/package.html
@@ -13,7 +13,7 @@
 <body>
 
 <h1>HCatalog Streaming Mutation API -- high level description</h1>
-
+<h2>@deprecated as of Hive 3.0.0</h2>
 <h2>Background</h2>
 <p>
 In certain data processing use cases it is necessary to modify existing

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolver.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolver.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolver.java
index 5dd0b8e..3432baa 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolver.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolver.java
@@ -17,7 +17,10 @@
  */
 package org.apache.hive.hcatalog.streaming.mutate.worker;
 
-/** Computes and appends bucket ids to records that are due to be inserted. */
+/** Computes and appends bucket ids to records that are due to be inserted.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public interface BucketIdResolver {
 
   Object attachBucketIdToRecord(Object record);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolverImpl.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolverImpl.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolverImpl.java
index 7c2cade..1d51d85 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolverImpl.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/BucketIdResolverImpl.java
@@ -30,7 +30,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 /**
  * Implementation of a {@link BucketIdResolver} that includes the logic required to calculate a bucket id from a record
  * that is consistent with Hive's own internal computation scheme.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class BucketIdResolverImpl implements BucketIdResolver {
 
   private static final long INVALID_TRANSACTION_ID = -1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/Mutator.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/Mutator.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/Mutator.java
index 7126a88..e6f968e 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/Mutator.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/Mutator.java
@@ -24,7 +24,9 @@ import java.io.IOException;
 /**
  * Interface for submitting mutation events to a given partition and bucket in an ACID table. Requires records to arrive
  * in the order defined by the {@link SequenceValidator}.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public interface Mutator extends Closeable, Flushable {
 
   void insert(Object record) throws IOException;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinator.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinator.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinator.java
index ad14c72..67785d0 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinator.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinator.java
@@ -52,7 +52,9 @@ import org.slf4j.LoggerFactory;
  * grouping phase so that they are grouped correctly. Note that any write id or row id assigned to the
  * {@link RecordIdentifier RecordIdentifier} of such events will be ignored by both the coordinator and the underlying
  * {@link RecordUpdater}.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class MutatorCoordinator implements Closeable, Flushable {
 
   private static final Logger LOG = LoggerFactory.getLogger(MutatorCoordinator.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinatorBuilder.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinatorBuilder.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinatorBuilder.java
index 80b90a2..698ba7c 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinatorBuilder.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorCoordinatorBuilder.java
@@ -32,7 +32,10 @@ import org.apache.hive.hcatalog.streaming.mutate.HiveConfFactory;
 import org.apache.hive.hcatalog.streaming.mutate.UgiMetaStoreClientFactory;
 import org.apache.hive.hcatalog.streaming.mutate.client.AcidTable;
 
-/** Convenience class for building {@link MutatorCoordinator} instances. */
+/** Convenience class for building {@link MutatorCoordinator} instances.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public class MutatorCoordinatorBuilder {
 
   private HiveConf configuration;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorFactory.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorFactory.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorFactory.java
index da7558f..d3d3210 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorFactory.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorFactory.java
@@ -22,6 +22,10 @@ import java.io.IOException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 
+/**
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public interface MutatorFactory {
 
   Mutator newMutator(AcidOutputFormat<?, ?> outputFormat, long writeId, Path partitionPath, int bucketId)

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
index 84c477f..1e0cb72 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
@@ -28,7 +28,10 @@ import org.apache.hadoop.hive.ql.io.RecordUpdater;
 import org.apache.hadoop.hive.ql.io.orc.OrcRecordUpdater;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
-/** Base {@link Mutator} implementation. Creates a suitable {@link RecordUpdater} and delegates mutation events. */
+/** Base {@link Mutator} implementation. Creates a suitable {@link RecordUpdater} and delegates mutation events.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public class MutatorImpl implements Mutator {
 
   private final long writeId;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/PartitionHelper.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/PartitionHelper.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/PartitionHelper.java
index 165bc5e..d064b0c 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/PartitionHelper.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/PartitionHelper.java
@@ -22,7 +22,10 @@ import java.util.List;
 
 import org.apache.hadoop.fs.Path;
 
-/** Implementations are responsible for creating and obtaining path information about partitions. */
+/** Implementations are responsible for creating and obtaining path information about partitions.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 interface PartitionHelper extends Closeable {
 
   /** Return the location of the partition described by the provided values. */

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspector.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspector.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspector.java
index 55add67..5d1f175 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspector.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspector.java
@@ -19,7 +19,10 @@ package org.apache.hive.hcatalog.streaming.mutate.worker;
 
 import org.apache.hadoop.hive.ql.io.RecordIdentifier;
 
-/** Provide a means to extract {@link RecordIdentifier} from record objects. */
+/** Provide a means to extract {@link RecordIdentifier} from record objects.
+ * @deprecated as of Hive 3.0.0
+ */
+@Deprecated
 public interface RecordInspector {
 
   /** Get the {@link RecordIdentifier} from the record - to be used for updates and deletes only. */

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspectorImpl.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspectorImpl.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspectorImpl.java
index 9438e13..37329c3 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspectorImpl.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/RecordInspectorImpl.java
@@ -29,7 +29,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
  * Standard {@link RecordInspector} implementation that uses the supplied {@link ObjectInspector} and
  * {@link AcidOutputFormat.Options#recordIdColumn(int) record id column} to extract {@link RecordIdentifier
  * RecordIdentifiers}, and calculate bucket ids from records.
+ * @deprecated as of Hive 3.0.0
  */
+@Deprecated
 public class RecordInspectorImpl implements RecordInspector {
 
   private final StructObjectInspector structObjectInspector;

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/package-info.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/package-info.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/package-info.java
deleted file mode 100644
index 36d6b13..0000000
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@Deprecated // use org.apache.hive.streaming instead
-package org.apache.hive.hcatalog.streaming;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 90dbdac..13aa5e9 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -439,68 +439,75 @@ public class TestStreaming {
     String tableLoc  = "'" + dbUri + Path.SEPARATOR + "streamedtable" + "'";
     String tableLoc2 = "'" + dbUri + Path.SEPARATOR + "finaltable" + "'";
     String tableLoc3 = "'" + dbUri + Path.SEPARATOR + "nobucket" + "'";
+    // disabling vectorization as this test yields incorrect results with vectorization
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
+    try (IDriver driver = DriverFactory.newDriver(conf)) {
+      runDDL(driver, "create database testBucketing3");
+      runDDL(driver, "use testBucketing3");
+      runDDL(driver, "create table streamedtable ( key1 string,key2 int,data string ) clustered by ( key1,key2 ) into "
+        + bucketCount + " buckets  stored as orc  location " + tableLoc + " TBLPROPERTIES ('transactional'='true')");
+      //  In 'nobucket' table we capture bucketid from streamedtable to workaround a hive bug that prevents joins two identically bucketed tables
+      runDDL(driver, "create table nobucket ( bucketid int, key1 string,key2 int,data string ) location " + tableLoc3);
+      runDDL(driver,
+        "create table finaltable ( bucketid int, key1 string,key2 int,data string ) clustered by ( key1,key2 ) into "
+          + bucketCount + " buckets  stored as orc location " + tableLoc2 + " TBLPROPERTIES ('transactional'='true')");
+
+
+      String[] records = new String[]{
+        "PSFAHYLZVC,29,EPNMA",
+        "PPPRKWAYAU,96,VUTEE",
+        "MIAOFERCHI,3,WBDSI",
+        "CEGQAZOWVN,0,WCUZL",
+        "XWAKMNSVQF,28,YJVHU",
+        "XBWTSAJWME,2,KDQFO",
+        "FUVLQTAXAY,5,LDSDG",
+        "QTQMDJMGJH,6,QBOMA",
+        "EFLOTLWJWN,71,GHWPS",
+        "PEQNAOJHCM,82,CAAFI",
+        "MOEKQLGZCP,41,RUACR",
+        "QZXMCOPTID,37,LFLWE",
+        "EYALVWICRD,13,JEZLC",
+        "VYWLZAYTXX,16,DMVZX",
+        "OSALYSQIXR,47,HNZVE",
+        "JGKVHKCEGQ,25,KSCJB",
+        "WQFMMYDHET,12,DTRWA",
+        "AJOVAYZKZQ,15,YBKFO",
+        "YAQONWCUAU,31,QJNHZ",
+        "DJBXUEUOEB,35,IYCBL"
+      };
+
+      HiveEndPoint endPt = new HiveEndPoint(metaStoreURI, "testBucketing3", "streamedtable", null);
+      String[] colNames1 = new String[]{"key1", "key2", "data"};
+      DelimitedInputWriter wr = new DelimitedInputWriter(colNames1, ",", endPt);
+      StreamingConnection connection = endPt.newConnection(false, "UT_" + Thread.currentThread().getName());
+
+      TransactionBatch txnBatch = connection.fetchTransactionBatch(2, wr);
+      txnBatch.beginNextTransaction();
 
-    runDDL(driver, "create database testBucketing3");
-    runDDL(driver, "use testBucketing3");
-    runDDL(driver, "create table streamedtable ( key1 string,key2 int,data string ) clustered by ( key1,key2 ) into "
-            + bucketCount + " buckets  stored as orc  location " + tableLoc + " TBLPROPERTIES ('transactional'='true')") ;
-//  In 'nobucket' table we capture bucketid from streamedtable to workaround a hive bug that prevents joins two identically bucketed tables
-    runDDL(driver, "create table nobucket ( bucketid int, key1 string,key2 int,data string ) location " + tableLoc3) ;
-    runDDL(driver, "create table finaltable ( bucketid int, key1 string,key2 int,data string ) clustered by ( key1,key2 ) into "
-            + bucketCount + " buckets  stored as orc location " + tableLoc2 + " TBLPROPERTIES ('transactional'='true')");
-
-
-    String[] records = new String[] {
-    "PSFAHYLZVC,29,EPNMA",
-    "PPPRKWAYAU,96,VUTEE",
-    "MIAOFERCHI,3,WBDSI",
-    "CEGQAZOWVN,0,WCUZL",
-    "XWAKMNSVQF,28,YJVHU",
-    "XBWTSAJWME,2,KDQFO",
-    "FUVLQTAXAY,5,LDSDG",
-    "QTQMDJMGJH,6,QBOMA",
-    "EFLOTLWJWN,71,GHWPS",
-    "PEQNAOJHCM,82,CAAFI",
-    "MOEKQLGZCP,41,RUACR",
-    "QZXMCOPTID,37,LFLWE",
-    "EYALVWICRD,13,JEZLC",
-    "VYWLZAYTXX,16,DMVZX",
-    "OSALYSQIXR,47,HNZVE",
-    "JGKVHKCEGQ,25,KSCJB",
-    "WQFMMYDHET,12,DTRWA",
-    "AJOVAYZKZQ,15,YBKFO",
-    "YAQONWCUAU,31,QJNHZ",
-    "DJBXUEUOEB,35,IYCBL"
-    };
-
-    HiveEndPoint endPt = new HiveEndPoint(metaStoreURI, "testBucketing3", "streamedtable", null);
-    String[] colNames1 = new String[] { "key1", "key2", "data" };
-    DelimitedInputWriter wr = new DelimitedInputWriter(colNames1,",",  endPt);
-    StreamingConnection connection = endPt.newConnection(false, "UT_" + Thread.currentThread().getName());
-
-    TransactionBatch txnBatch =  connection.fetchTransactionBatch(2, wr);
-    txnBatch.beginNextTransaction();
-
-    for (String record : records) {
-      txnBatch.write(record.toString().getBytes());
-    }
+      for (String record : records) {
+        txnBatch.write(record.toString().getBytes());
+      }
 
-    txnBatch.commit();
-    txnBatch.close();
-    connection.close();
+      txnBatch.commit();
+      txnBatch.close();
+      connection.close();
 
-    ArrayList<String> res1 = queryTable(driver, "select row__id.bucketid, * from streamedtable order by key2");
-    for (String re : res1) {
-      System.out.println(re);
-    }
+      ArrayList<String> res1 = queryTable(driver, "select row__id.bucketid, * from streamedtable order by key2");
+      for (String re : res1) {
+        System.out.println(re);
+      }
 
-    driver.run("insert into nobucket select row__id.bucketid,* from streamedtable");
-    runDDL(driver, " insert into finaltable select * from nobucket");
-    ArrayList<String> res2 = queryTable(driver, "select row__id.bucketid,* from finaltable where row__id.bucketid<>bucketid");
-    for (String s : res2) {
-      LOG.error(s);
+      driver.run("insert into nobucket select row__id.bucketid,* from streamedtable");
+      runDDL(driver, " insert into finaltable select * from nobucket");
+      ArrayList<String> res2 = queryTable(driver,
+        "select row__id.bucketid,* from finaltable where row__id.bucketid<>bucketid");
+      for (String s : res2) {
+        LOG.error(s);
+      }
+      Assert.assertTrue(res2.isEmpty());
+    } finally {
+      conf.unset(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED.varname);
     }
-    Assert.assertTrue(res2.isEmpty());
   }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bf8d305a/itests/hive-unit/pom.xml
----------------------------------------------------------------------
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index 3ae7f2f..b51ebf2 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -76,6 +76,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-streaming</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-streaming</artifactId>
       <version>${project.version}</version>