You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/18 21:39:29 UTC
svn commit: r1626058 [1/2] - in /hive/branches/cbo: ./
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/
metastore/scripts/upgrade/mssql/
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/
...
Author: gunther
Date: Thu Sep 18 19:39:27 2014
New Revision: 1626058
URL: http://svn.apache.org/r1626058
Log:
Merge latest trunk into cbo branch. (Gunther Hagleitner)
Added:
hive/branches/cbo/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
- copied unchanged from r1626046, hive/trunk/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountMerge.java
- copied unchanged from r1626046, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountMerge.java
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_delete.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientpositive/authorization_delete.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_delete_own_table.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientpositive/authorization_delete_own_table.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_update.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientpositive/authorization_update.q
hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_update_own_table.q
- copied unchanged from r1626046, hive/trunk/ql/src/test/queries/clientpositive/authorization_update_own_table.q
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
hive/branches/cbo/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_delete.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientpositive/authorization_delete.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_update.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientpositive/authorization_update.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
- copied unchanged from r1626046, hive/trunk/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
Modified:
hive/branches/cbo/ (props changed)
hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
hive/branches/cbo/packaging/pom.xml
hive/branches/cbo/pom.xml
hive/branches/cbo/ql/if/queryplan.thrift
hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.h
hive/branches/cbo/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
hive/branches/cbo/ql/src/gen/thrift/gen-php/Types.php
hive/branches/cbo/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
hive/branches/cbo/ql/src/gen/thrift/gen-rb/queryplan_types.rb
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
hive/branches/cbo/ql/src/test/queries/clientpositive/vectorization_short_regress.q
hive/branches/cbo/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
hive/branches/cbo/serde/src/gen/thrift/gen-cpp/complex_types.cpp
hive/branches/cbo/serde/src/gen/thrift/gen-cpp/complex_types.h
hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
hive/branches/cbo/serde/src/gen/thrift/gen-py/complex/ttypes.py
hive/branches/cbo/serde/src/gen/thrift/gen-rb/complex_types.rb
Propchange: hive/branches/cbo/
------------------------------------------------------------------------------
Merged /hive/trunk:r1625876-1626046
Modified: hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java (original)
+++ hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java Thu Sep 18 19:39:27 2014
@@ -33,10 +33,13 @@ import java.util.List;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
@@ -52,10 +55,12 @@ import org.mockito.Mockito;
* Test HiveAuthorizer api invocation
*/
public class TestHiveAuthorizerCheckInvocation {
+ private final Log LOG = LogFactory.getLog(this.getClass().getName());
protected static HiveConf conf;
protected static Driver driver;
private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
+ "Table";
+ private static final String acidTableName = tableName + "_acid";
private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
+ "Db";
static HiveAuthorizer mockedAuthorizer;
@@ -82,14 +87,18 @@ public class TestHiveAuthorizerCheckInvo
conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
- conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+ conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
+ conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
SessionState.start(conf);
driver = new Driver(conf);
runCmd("create table " + tableName
+ " (i int, j int, k string) partitioned by (city string, date string) ");
runCmd("create database " + dbName);
+ // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid
+ runCmd("create table " + acidTableName + " (i int, j int) clustered by (i) into 2 buckets " +
+ "stored as orc");
}
private static void runCmd(String cmd) throws CommandNeedRetryException {
@@ -99,6 +108,10 @@ public class TestHiveAuthorizerCheckInvo
@AfterClass
public static void afterTests() throws Exception {
+ // Drop the tables when we're done. This makes the test work inside an IDE
+ runCmd("drop table if exists " + acidTableName);
+ runCmd("drop table if exists " + tableName);
+ runCmd("drop database if exists " + dbName);
driver.close();
}
@@ -244,6 +257,63 @@ public class TestHiveAuthorizerCheckInvo
assertEquals("db name", null, funcObj.getDbname());
}
+ @Test
+ public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException,
+ HiveAccessControlException, CommandNeedRetryException {
+ reset(mockedAuthorizer);
+ int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+ assertEquals(0, status);
+
+ Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> outputs = io.getRight();
+ HivePrivilegeObject tableObj = outputs.get(0);
+ LOG.debug("Got privilege object " + tableObj);
+ assertEquals("no of columns used", 1, tableObj.getColumns().size());
+ assertEquals("Column used", "i", tableObj.getColumns().get(0));
+ List<HivePrivilegeObject> inputs = io.getLeft();
+ assertEquals(1, inputs.size());
+ tableObj = inputs.get(0);
+ assertEquals(1, tableObj.getColumns().size());
+ assertEquals("j", tableObj.getColumns().get(0));
+ }
+
+ @Test
+ public void testUpdateSomeColumnsUsedExprInSet() throws HiveAuthzPluginException,
+ HiveAccessControlException, CommandNeedRetryException {
+ reset(mockedAuthorizer);
+ int status = driver.compile("update " + acidTableName + " set i = 5, l = k where j = 3");
+ assertEquals(0, status);
+
+ Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> outputs = io.getRight();
+ HivePrivilegeObject tableObj = outputs.get(0);
+ LOG.debug("Got privilege object " + tableObj);
+ assertEquals("no of columns used", 2, tableObj.getColumns().size());
+ assertEquals("Columns used", Arrays.asList("i", "l"),
+ getSortedList(tableObj.getColumns()));
+ List<HivePrivilegeObject> inputs = io.getLeft();
+ assertEquals(1, inputs.size());
+ tableObj = inputs.get(0);
+ assertEquals(2, tableObj.getColumns().size());
+ assertEquals("Columns used", Arrays.asList("j", "k"),
+ getSortedList(tableObj.getColumns()));
+ }
+
+ @Test
+ public void testDelete() throws HiveAuthzPluginException,
+ HiveAccessControlException, CommandNeedRetryException {
+ reset(mockedAuthorizer);
+ int status = driver.compile("delete from " + acidTableName + " where j = 3");
+ assertEquals(0, status);
+
+ Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> inputs = io.getLeft();
+ assertEquals(1, inputs.size());
+ HivePrivilegeObject tableObj = inputs.get(0);
+ assertEquals(1, tableObj.getColumns().size());
+ assertEquals("j", tableObj.getColumns().get(0));
+ }
+
private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
assertEquals("number of inputs", 1, inputs.size());
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java Thu Sep 18 19:39:27 2014
@@ -135,9 +135,9 @@ public class FieldSchema implements org.
String comment)
{
this();
- this.name = name;
- this.type = type;
- this.comment = comment;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
+ this.type = org.apache.hive.common.util.HiveStringUtils.intern(type);
+ this.comment = org.apache.hive.common.util.HiveStringUtils.intern(comment);
}
/**
@@ -145,13 +145,13 @@ public class FieldSchema implements org.
*/
public FieldSchema(FieldSchema other) {
if (other.isSetName()) {
- this.name = other.name;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(other.name);
}
if (other.isSetType()) {
- this.type = other.type;
+ this.type = org.apache.hive.common.util.HiveStringUtils.intern(other.type);
}
if (other.isSetComment()) {
- this.comment = other.comment;
+ this.comment = org.apache.hive.common.util.HiveStringUtils.intern(other.comment);
}
}
@@ -171,7 +171,7 @@ public class FieldSchema implements org.
}
public void setName(String name) {
- this.name = name;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
}
public void unsetName() {
@@ -194,7 +194,7 @@ public class FieldSchema implements org.
}
public void setType(String type) {
- this.type = type;
+ this.type = org.apache.hive.common.util.HiveStringUtils.intern(type);
}
public void unsetType() {
@@ -217,7 +217,7 @@ public class FieldSchema implements org.
}
public void setComment(String comment) {
- this.comment = comment;
+ this.comment = org.apache.hive.common.util.HiveStringUtils.intern(comment);
}
public void unsetComment() {
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java Thu Sep 18 19:39:27 2014
@@ -182,14 +182,14 @@ public class Partition implements org.ap
{
this();
this.values = values;
- this.dbName = dbName;
- this.tableName = tableName;
+ this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(dbName);
+ this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(tableName);
this.createTime = createTime;
setCreateTimeIsSet(true);
this.lastAccessTime = lastAccessTime;
setLastAccessTimeIsSet(true);
this.sd = sd;
- this.parameters = parameters;
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
/**
@@ -205,10 +205,10 @@ public class Partition implements org.ap
this.values = __this__values;
}
if (other.isSetDbName()) {
- this.dbName = other.dbName;
+ this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(other.dbName);
}
if (other.isSetTableName()) {
- this.tableName = other.tableName;
+ this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(other.tableName);
}
this.createTime = other.createTime;
this.lastAccessTime = other.lastAccessTime;
@@ -222,9 +222,9 @@ public class Partition implements org.ap
String other_element_key = other_element.getKey();
String other_element_value = other_element.getValue();
- String __this__parameters_copy_key = other_element_key;
+ String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
- String __this__parameters_copy_value = other_element_value;
+ String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
__this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
}
@@ -296,7 +296,7 @@ public class Partition implements org.ap
}
public void setDbName(String dbName) {
- this.dbName = dbName;
+ this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(dbName);
}
public void unsetDbName() {
@@ -319,7 +319,7 @@ public class Partition implements org.ap
}
public void setTableName(String tableName) {
- this.tableName = tableName;
+ this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(tableName);
}
public void unsetTableName() {
@@ -420,7 +420,7 @@ public class Partition implements org.ap
}
public void setParameters(Map<String,String> parameters) {
- this.parameters = parameters;
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
public void unsetParameters() {
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java Thu Sep 18 19:39:27 2014
@@ -137,9 +137,9 @@ public class SerDeInfo implements org.ap
Map<String,String> parameters)
{
this();
- this.name = name;
- this.serializationLib = serializationLib;
- this.parameters = parameters;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
+ this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(serializationLib);
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
/**
@@ -147,10 +147,10 @@ public class SerDeInfo implements org.ap
*/
public SerDeInfo(SerDeInfo other) {
if (other.isSetName()) {
- this.name = other.name;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(other.name);
}
if (other.isSetSerializationLib()) {
- this.serializationLib = other.serializationLib;
+ this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(other.serializationLib);
}
if (other.isSetParameters()) {
Map<String,String> __this__parameters = new HashMap<String,String>();
@@ -159,9 +159,9 @@ public class SerDeInfo implements org.ap
String other_element_key = other_element.getKey();
String other_element_value = other_element.getValue();
- String __this__parameters_copy_key = other_element_key;
+ String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
- String __this__parameters_copy_value = other_element_value;
+ String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
__this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
}
@@ -185,7 +185,7 @@ public class SerDeInfo implements org.ap
}
public void setName(String name) {
- this.name = name;
+ this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
}
public void unsetName() {
@@ -208,7 +208,7 @@ public class SerDeInfo implements org.ap
}
public void setSerializationLib(String serializationLib) {
- this.serializationLib = serializationLib;
+ this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(serializationLib);
}
public void unsetSerializationLib() {
@@ -242,7 +242,7 @@ public class SerDeInfo implements org.ap
}
public void setParameters(Map<String,String> parameters) {
- this.parameters = parameters;
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
public void unsetParameters() {
Modified: hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java (original)
+++ hive/branches/cbo/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java Thu Sep 18 19:39:27 2014
@@ -216,17 +216,17 @@ public class StorageDescriptor implement
{
this();
this.cols = cols;
- this.location = location;
- this.inputFormat = inputFormat;
- this.outputFormat = outputFormat;
+ this.location = org.apache.hive.common.util.HiveStringUtils.intern(location);
+ this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(inputFormat);
+ this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(outputFormat);
this.compressed = compressed;
setCompressedIsSet(true);
this.numBuckets = numBuckets;
setNumBucketsIsSet(true);
this.serdeInfo = serdeInfo;
- this.bucketCols = bucketCols;
+ this.bucketCols = org.apache.hive.common.util.HiveStringUtils.intern(bucketCols);
this.sortCols = sortCols;
- this.parameters = parameters;
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
/**
@@ -242,13 +242,13 @@ public class StorageDescriptor implement
this.cols = __this__cols;
}
if (other.isSetLocation()) {
- this.location = other.location;
+ this.location = org.apache.hive.common.util.HiveStringUtils.intern(other.location);
}
if (other.isSetInputFormat()) {
- this.inputFormat = other.inputFormat;
+ this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(other.inputFormat);
}
if (other.isSetOutputFormat()) {
- this.outputFormat = other.outputFormat;
+ this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(other.outputFormat);
}
this.compressed = other.compressed;
this.numBuckets = other.numBuckets;
@@ -276,9 +276,9 @@ public class StorageDescriptor implement
String other_element_key = other_element.getKey();
String other_element_value = other_element.getValue();
- String __this__parameters_copy_key = other_element_key;
+ String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
- String __this__parameters_copy_value = other_element_value;
+ String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
__this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
}
@@ -356,7 +356,7 @@ public class StorageDescriptor implement
}
public void setLocation(String location) {
- this.location = location;
+ this.location = org.apache.hive.common.util.HiveStringUtils.intern(location);
}
public void unsetLocation() {
@@ -379,7 +379,7 @@ public class StorageDescriptor implement
}
public void setInputFormat(String inputFormat) {
- this.inputFormat = inputFormat;
+ this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(inputFormat);
}
public void unsetInputFormat() {
@@ -402,7 +402,7 @@ public class StorageDescriptor implement
}
public void setOutputFormat(String outputFormat) {
- this.outputFormat = outputFormat;
+ this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(outputFormat);
}
public void unsetOutputFormat() {
@@ -507,7 +507,7 @@ public class StorageDescriptor implement
}
public void setBucketCols(List<String> bucketCols) {
- this.bucketCols = bucketCols;
+ this.bucketCols = org.apache.hive.common.util.HiveStringUtils.intern(bucketCols);
}
public void unsetBucketCols() {
@@ -579,7 +579,7 @@ public class StorageDescriptor implement
}
public void setParameters(Map<String,String> parameters) {
- this.parameters = parameters;
+ this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
}
public void unsetParameters() {
Modified: hive/branches/cbo/packaging/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/packaging/pom.xml?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/packaging/pom.xml (original)
+++ hive/branches/cbo/packaging/pom.xml Thu Sep 18 19:39:27 2014
@@ -60,6 +60,33 @@
</executions>
</plugin>
<plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${project.version}</version>
+ <type>jar</type>
+ <classifier>${hive.jdbc.driver.classifier}</classifier>
+ <overWrite>true</overWrite>
+ <outputDirectory>${project.build.directory}</outputDirectory>
+ <destFileName>${hive.jdbc.driver.jar}</destFileName>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
Modified: hive/branches/cbo/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/pom.xml?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/pom.xml (original)
+++ hive/branches/cbo/pom.xml Thu Sep 18 19:39:27 2014
@@ -105,7 +105,7 @@
<commons-exec.version>1.1</commons-exec.version>
<commons-httpclient.version>3.0.1</commons-httpclient.version>
<commons-io.version>2.4</commons-io.version>
- <commons-lang.version>2.4</commons-lang.version>
+ <commons-lang.version>2.6</commons-lang.version>
<commons-lang3.version>3.1</commons-lang3.version>
<commons-logging.version>1.1.3</commons-logging.version>
<commons-pool.version>1.5.4</commons-pool.version>
Modified: hive/branches/cbo/ql/if/queryplan.thrift
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/if/queryplan.thrift?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/if/queryplan.thrift (original)
+++ hive/branches/cbo/ql/if/queryplan.thrift Thu Sep 18 19:39:27 2014
@@ -57,6 +57,8 @@ enum OperatorType {
MUX,
DEMUX,
EVENT,
+ ORCFILEMERGE,
+ RCFILEMERGE,
}
struct Operator {
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp Thu Sep 18 19:39:27 2014
@@ -52,7 +52,9 @@ int _kOperatorTypeValues[] = {
OperatorType::PTF,
OperatorType::MUX,
OperatorType::DEMUX,
- OperatorType::EVENT
+ OperatorType::EVENT,
+ OperatorType::ORCFILEMERGE,
+ OperatorType::RCFILEMERGE
};
const char* _kOperatorTypeNames[] = {
"JOIN",
@@ -76,9 +78,11 @@ const char* _kOperatorTypeNames[] = {
"PTF",
"MUX",
"DEMUX",
- "EVENT"
+ "EVENT",
+ "ORCFILEMERGE",
+ "RCFILEMERGE"
};
-const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(22, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(24, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
int _kTaskTypeValues[] = {
TaskType::MAP,
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.h
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.h?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.h (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-cpp/queryplan_types.h Thu Sep 18 19:39:27 2014
@@ -57,7 +57,9 @@ struct OperatorType {
PTF = 18,
MUX = 19,
DEMUX = 20,
- EVENT = 21
+ EVENT = 21,
+ ORCFILEMERGE = 22,
+ RCFILEMERGE = 23
};
};
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java Thu Sep 18 19:39:27 2014
@@ -7,6 +7,10 @@
package org.apache.hadoop.hive.ql.plan.api;
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
public enum OperatorType implements org.apache.thrift.TEnum {
JOIN(0),
MAPJOIN(1),
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-php/Types.php
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-php/Types.php?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-php/Types.php (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-php/Types.php Thu Sep 18 19:39:27 2014
@@ -57,6 +57,8 @@ final class OperatorType {
const MUX = 19;
const DEMUX = 20;
const EVENT = 21;
+ const ORCFILEMERGE = 22;
+ const RCFILEMERGE = 23;
static public $__names = array(
0 => 'JOIN',
1 => 'MAPJOIN',
@@ -80,6 +82,8 @@ final class OperatorType {
19 => 'MUX',
20 => 'DEMUX',
21 => 'EVENT',
+ 22 => 'ORCFILEMERGE',
+ 23 => 'RCFILEMERGE',
);
}
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-py/queryplan/ttypes.py?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-py/queryplan/ttypes.py (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-py/queryplan/ttypes.py Thu Sep 18 19:39:27 2014
@@ -67,6 +67,8 @@ class OperatorType:
MUX = 19
DEMUX = 20
EVENT = 21
+ ORCFILEMERGE = 22
+ RCFILEMERGE = 23
_VALUES_TO_NAMES = {
0: "JOIN",
@@ -91,6 +93,8 @@ class OperatorType:
19: "MUX",
20: "DEMUX",
21: "EVENT",
+ 22: "ORCFILEMERGE",
+ 23: "RCFILEMERGE",
}
_NAMES_TO_VALUES = {
@@ -116,6 +120,8 @@ class OperatorType:
"MUX": 19,
"DEMUX": 20,
"EVENT": 21,
+ "ORCFILEMERGE": 22,
+ "RCFILEMERGE": 23,
}
class TaskType:
Modified: hive/branches/cbo/ql/src/gen/thrift/gen-rb/queryplan_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/gen/thrift/gen-rb/queryplan_types.rb?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/gen/thrift/gen-rb/queryplan_types.rb (original)
+++ hive/branches/cbo/ql/src/gen/thrift/gen-rb/queryplan_types.rb Thu Sep 18 19:39:27 2014
@@ -43,8 +43,10 @@ module OperatorType
MUX = 19
DEMUX = 20
EVENT = 21
- VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY", 18 => "PTF", 19 => "MUX", 20 => "DEMUX", 21 => "EVENT"}
- VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY, PTF, MUX, DEMUX, EVENT]).freeze
+ ORCFILEMERGE = 22
+ RCFILEMERGE = 23
+ VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY", 18 => "PTF", 19 => "MUX", 20 => "DEMUX", 21 => "EVENT", 22 => "ORCFILEMERGE", 23 => "RCFILEMERGE"}
+ VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY, PTF, MUX, DEMUX, EVENT, ORCFILEMERGE, RCFILEMERGE]).freeze
end
module TaskType
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Sep 18 19:39:27 2014
@@ -503,9 +503,11 @@ public class Driver implements CommandPr
// get mapping of tables to columns used
ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
// colAccessInfo is set only in case of SemanticAnalyzer
- Map<String, List<String>> tab2Cols = colAccessInfo != null ? colAccessInfo
+ Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo
.getTableToColumnAccessMap() : null;
- doAuthorizationV2(ss, op, inputs, outputs, command, tab2Cols);
+ Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ?
+ sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
+ doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
return;
}
if (op == null) {
@@ -696,7 +698,13 @@ public class Driver implements CommandPr
}
private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
- HashSet<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols) throws HiveException {
+ HashSet<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
+ Map<String, List<String>> updateTab2Cols) throws HiveException {
+
+ /* comment for reviewers -> updateTab2Cols needed to be separate from tab2cols because if I
+ pass tab2cols to getHivePrivObjects for the output case it will trip up insert/selects,
+ since the insert will get passed the columns from the select.
+ */
HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
@@ -704,7 +712,7 @@ public class Driver implements CommandPr
HiveOperationType hiveOpType = getHiveOperationType(op);
List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs, tab2cols);
- List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs, null);
+ List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs, updateTab2Cols);
ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build());
}
@@ -730,12 +738,6 @@ public class Driver implements CommandPr
//do not authorize temporary uris
continue;
}
- if (privObject instanceof ReadEntity && ((ReadEntity)privObject).isUpdateOrDelete()) {
- // Skip this one, as we don't want to check select privileges for the table we're reading
- // for an update or delete.
- continue;
- }
-
//support for authorization on partitions needs to be added
String dbname = null;
String objName = null;
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Thu Sep 18 19:39:27 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.exec.ve
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFAvgDecimal;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCount;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountMerge;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountStar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFSumDecimal;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDouble;
@@ -1898,7 +1899,7 @@ public class VectorizationContext {
add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.DECIMAL, null, VectorUDAFMaxDecimal.class));
add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.NONE, GroupByDesc.Mode.HASH, VectorUDAFCountStar.class));
add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class));
- add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, GroupByDesc.Mode.MERGEPARTIAL, VectorUDAFSumLong.class));
+ add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, GroupByDesc.Mode.MERGEPARTIAL, VectorUDAFCountMerge.class));
add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class));
add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class));
add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.DECIMAL, GroupByDesc.Mode.HASH, VectorUDAFCount.class));
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java Thu Sep 18 19:39:27 2014
@@ -46,15 +46,7 @@ public class VectorUDAFCount extends Vec
private static final long serialVersionUID = 1L;
- transient private long value;
- transient private boolean isNull;
-
- public void initIfNull() {
- if (isNull) {
- isNull = false;
- value = 0;
- }
- }
+ transient private long count;
@Override
public int getVariableSize() {
@@ -63,8 +55,7 @@ public class VectorUDAFCount extends Vec
@Override
public void reset() {
- isNull = true;
- value = 0L;
+ count = 0L;
}
}
@@ -131,8 +122,7 @@ public class VectorUDAFCount extends Vec
aggregationBufferSets,
aggregateIndex,
i);
- myagg.initIfNull();
- myagg.value++;
+ myagg.count++;
}
}
@@ -148,8 +138,7 @@ public class VectorUDAFCount extends Vec
aggregationBufferSets,
aggregateIndex,
i);
- myagg.initIfNull();
- myagg.value++;
+ myagg.count++;
}
}
}
@@ -168,8 +157,7 @@ public class VectorUDAFCount extends Vec
aggregationBufferSets,
aggregateIndex,
j);
- myagg.initIfNull();
- myagg.value++;
+ myagg.count++;
}
}
}
@@ -191,17 +179,15 @@ public class VectorUDAFCount extends Vec
Aggregation myagg = (Aggregation)agg;
- myagg.initIfNull();
-
if (inputVector.isRepeating) {
if (inputVector.noNulls || !inputVector.isNull[0]) {
- myagg.value += batchSize;
+ myagg.count += batchSize;
}
return;
}
if (inputVector.noNulls) {
- myagg.value += batchSize;
+ myagg.count += batchSize;
return;
}
else if (!batch.selectedInUse) {
@@ -221,7 +207,7 @@ public class VectorUDAFCount extends Vec
for (int j=0; j< batchSize; ++j) {
int i = selected[j];
if (!isNull[i]) {
- myagg.value += 1;
+ myagg.count += 1;
}
}
}
@@ -233,7 +219,7 @@ public class VectorUDAFCount extends Vec
for (int i=0; i< batchSize; ++i) {
if (!isNull[i]) {
- myagg.value += 1;
+ myagg.count += 1;
}
}
}
@@ -251,14 +237,9 @@ public class VectorUDAFCount extends Vec
@Override
public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
- Aggregation myagg = (Aggregation) agg;
- if (myagg.isNull) {
- return null;
- }
- else {
- result.set (myagg.value);
+ Aggregation myagg = (Aggregation) agg;
+ result.set (myagg.count);
return result;
- }
}
@Override
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java Thu Sep 18 19:39:27 2014
@@ -44,8 +44,7 @@ public class VectorUDAFCountStar extends
private static final long serialVersionUID = 1L;
- transient private long value;
- transient private boolean isNull;
+ transient private long count;
@Override
public int getVariableSize() {
@@ -54,8 +53,7 @@ public class VectorUDAFCountStar extends
@Override
public void reset() {
- isNull = true;
- value = 0L;
+ count = 0L;
}
}
@@ -95,8 +93,7 @@ public class VectorUDAFCountStar extends
for (int i=0; i < batchSize; ++i) {
Aggregation myAgg = getCurrentAggregationBuffer(
aggregationBufferSets, aggregateIndex, i);
- myAgg.isNull = false;
- ++myAgg.value;
+ ++myAgg.count;
}
}
@@ -111,8 +108,7 @@ public class VectorUDAFCountStar extends
}
Aggregation myagg = (Aggregation)agg;
- myagg.isNull = false;
- myagg.value += batchSize;
+ myagg.count += batchSize;
}
@Override
@@ -128,14 +124,9 @@ public class VectorUDAFCountStar extends
@Override
public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
- Aggregation myagg = (Aggregation) agg;
- if (myagg.isNull) {
- return null;
- }
- else {
- result.set (myagg.value);
+ Aggregation myagg = (Aggregation) agg;
+ result.set (myagg.count);
return result;
- }
}
@Override
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java Thu Sep 18 19:39:27 2014
@@ -21,6 +21,8 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.EnumSet;
+import javax.annotation.Nullable;
+
interface CompressionCodec {
public enum Modifier {
@@ -62,6 +64,6 @@ interface CompressionCodec {
* @param modifiers compression modifiers
* @return codec for use after optional modification
*/
- CompressionCodec modify(EnumSet<Modifier> modifiers);
+ CompressionCodec modify(@Nullable EnumSet<Modifier> modifiers);
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java Thu Sep 18 19:39:27 2014
@@ -485,6 +485,7 @@ class WriterImpl implements Writer, Memo
modifiers = EnumSet.of(Modifier.FASTEST, Modifier.BINARY);
break;
default:
+ LOG.warn("Missing ORC compression modifiers for " + kind);
modifiers = null;
break;
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java Thu Sep 18 19:39:27 2014
@@ -24,6 +24,8 @@ import java.util.zip.DataFormatException
import java.util.zip.Deflater;
import java.util.zip.Inflater;
+import javax.annotation.Nullable;
+
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.HadoopShims.DirectCompressionType;
import org.apache.hadoop.hive.shims.HadoopShims.DirectDecompressorShim;
@@ -130,7 +132,12 @@ class ZlibCodec implements CompressionCo
}
@Override
- public CompressionCodec modify(EnumSet<Modifier> modifiers) {
+ public CompressionCodec modify(@Nullable EnumSet<Modifier> modifiers) {
+
+ if (modifiers == null) {
+ return this;
+ }
+
int l = this.level;
int s = this.strategy;
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Thu Sep 18 19:39:27 2014
@@ -115,6 +115,10 @@ public abstract class BaseSemanticAnalyz
protected LineageInfo linfo;
protected TableAccessInfo tableAccessInfo;
protected ColumnAccessInfo columnAccessInfo;
+ /**
+ * Columns accessed by updates
+ */
+ protected ColumnAccessInfo updateColumnAccessInfo;
public boolean skipAuthorization() {
@@ -866,6 +870,14 @@ public abstract class BaseSemanticAnalyz
this.columnAccessInfo = columnAccessInfo;
}
+ public ColumnAccessInfo getUpdateColumnAccessInfo() {
+ return updateColumnAccessInfo;
+ }
+
+ public void setUpdateColumnAccessInfo(ColumnAccessInfo updateColumnAccessInfo) {
+ this.updateColumnAccessInfo = updateColumnAccessInfo;
+ }
+
protected LinkedHashMap<String, String> extractPartitionSpecs(Tree partspec)
throws SemanticException {
LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java Thu Sep 18 19:39:27 2014
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.parse;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
+
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -54,4 +56,21 @@ public class ColumnAccessInfo {
}
return mapping;
}
+
+ /**
+ * Strip a virtual column out of the set of columns. This is useful in cases where we do not
+ * want to be checking against the user reading virtual columns, namely update and delete.
+ * @param vc
+ */
+ public void stripVirtualColumn(VirtualColumn vc) {
+ for (Map.Entry<String, Set<String>> e : tableToColumnAccessMap.entrySet()) {
+ for (String columnName : e.getValue()) {
+ if (vc.getName().equalsIgnoreCase(columnName)) {
+ e.getValue().remove(columnName);
+ break;
+ }
+ }
+ }
+
+ }
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java Thu Sep 18 19:39:27 2014
@@ -28,11 +28,13 @@ import org.apache.hadoop.hive.ql.io.Acid
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.session.SessionState;
import java.io.IOException;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -148,6 +150,8 @@ public class UpdateDeleteSemanticAnalyze
rewrittenQueryStr.append(" select ROW__ID");
Map<Integer, ASTNode> setColExprs = null;
+ Map<String, ASTNode> setCols = null;
+ Set<String> setRCols = new HashSet<String>();
if (updating()) {
// An update needs to select all of the columns, as we rewrite the entire row. Also,
// we need to figure out which columns we are going to replace. We won't write the set
@@ -160,7 +164,7 @@ public class UpdateDeleteSemanticAnalyze
// Get the children of the set clause, each of which should be a column assignment
List<? extends Node> assignments = setClause.getChildren();
- Map<String, ASTNode> setCols = new HashMap<String, ASTNode>(assignments.size());
+ setCols = new HashMap<String, ASTNode>(assignments.size());
setColExprs = new HashMap<Integer, ASTNode>(assignments.size());
for (Node a : assignments) {
ASTNode assignment = (ASTNode)a;
@@ -173,6 +177,8 @@ public class UpdateDeleteSemanticAnalyze
assert colName.getToken().getType() == HiveParser.Identifier :
"Expected column name";
+ addSetRCols((ASTNode) assignment.getChildren().get(1), setRCols);
+
String columnName = colName.getText();
// Make sure this isn't one of the partitioning columns, that's not supported.
@@ -323,6 +329,28 @@ public class UpdateDeleteSemanticAnalyze
WriteEntity.WriteType.UPDATE);
}
}
+
+ // For updates, we need to set the column access info so that it contains information on
+ // the columns we are updating.
+ if (updating()) {
+ ColumnAccessInfo cai = new ColumnAccessInfo();
+ for (String colName : setCols.keySet()) {
+ cai.add(Table.getCompleteName(mTable.getDbName(), mTable.getTableName()), colName);
+ }
+ setUpdateColumnAccessInfo(cai);
+
+ // Add the setRCols to the input list
+ for (String colName : setRCols) {
+ columnAccessInfo.add(Table.getCompleteName(mTable.getDbName(), mTable.getTableName()),
+ colName);
+ }
+ }
+
+ // We need to weed ROW__ID out of the input column info, as it doesn't make any sense to
+ // require the user to have authorization on that column.
+ if (columnAccessInfo != null) {
+ columnAccessInfo.stripVirtualColumn(VirtualColumn.ROWID);
+ }
}
private String operation() {
@@ -342,4 +370,22 @@ public class UpdateDeleteSemanticAnalyze
}
return false;
}
+
+ // This method find any columns on the right side of a set statement (thus rcols) and puts them
+ // in a set so we can add them to the list of input cols to check.
+ private void addSetRCols(ASTNode node, Set<String> setRCols) {
+
+ // See if this node is a TOK_TABLE_OR_COL. If so, find the value and put it in the list. If
+ // not, recurse on any children
+ if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
+ ASTNode colName = (ASTNode)node.getChildren().get(0);
+ assert colName.getToken().getType() == HiveParser.Identifier :
+ "Expected column name";
+ setRCols.add(colName.getText());
+ } else if (node.getChildren() != null) {
+ for (Node n : node.getChildren()) {
+ addSetRCols((ASTNode) n, setRCols);
+ }
+ }
+ }
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Thu Sep 18 19:39:27 2014
@@ -310,9 +310,12 @@ public class AuthorizationUtils {
return HivePrivObjectActionType.INSERT;
case INSERT_OVERWRITE:
return HivePrivObjectActionType.INSERT_OVERWRITE;
+ case UPDATE:
+ return HivePrivObjectActionType.UPDATE;
+ case DELETE:
+ return HivePrivObjectActionType.DELETE;
default:
- // Ignore other types for purposes of authorization, we are interested only
- // in INSERT vs INSERT_OVERWRITE as of now
+ // Ignore other types for purposes of authorization
break;
}
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Thu Sep 18 19:39:27 2014
@@ -81,7 +81,7 @@ public class HivePrivilegeObject impleme
GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS, FUNCTION
} ;
public enum HivePrivObjectActionType {
- OTHER, INSERT, INSERT_OVERWRITE
+ OTHER, INSERT, INSERT_OVERWRITE, UPDATE, DELETE
};
private final HivePrivilegeObjectType type;
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Thu Sep 18 19:39:27 2014
@@ -118,6 +118,7 @@ public class Operation2Privilege {
private static SQLPrivTypeGrant[] ADMIN_PRIV_AR = arr(SQLPrivTypeGrant.ADMIN_PRIV);
private static SQLPrivTypeGrant[] INS_NOGRANT_AR = arr(SQLPrivTypeGrant.INSERT_NOGRANT);
private static SQLPrivTypeGrant[] DEL_NOGRANT_AR = arr(SQLPrivTypeGrant.DELETE_NOGRANT);
+ private static SQLPrivTypeGrant[] UPD_NOGRANT_AR = arr(SQLPrivTypeGrant.UPDATE_NOGRANT);
private static SQLPrivTypeGrant[] OWNER_INS_SEL_DEL_NOGRANT_AR =
arr(SQLPrivTypeGrant.OWNER_PRIV,
SQLPrivTypeGrant.INSERT_NOGRANT,
@@ -287,8 +288,14 @@ public class Operation2Privilege {
op2Priv.put(HiveOperationType.QUERY,
arr(
new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
- new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, null),
- new PrivRequirement(DEL_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.INSERT_OVERWRITE)
+ new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.INSERT),
+ new PrivRequirement(
+ arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT),
+ IOType.OUTPUT,
+ HivePrivObjectActionType.INSERT_OVERWRITE),
+ new PrivRequirement(DEL_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.DELETE),
+ new PrivRequirement(UPD_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.UPDATE),
+ new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.OTHER)
)
);
Modified: hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Thu Sep 18 19:39:27 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.plan.Ag
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -601,6 +602,30 @@ public class TestVectorGroupByOperator {
}
@Test
+ public void testCountReduce() throws HiveException {
+ testAggregateCountReduce(
+ 2,
+ Arrays.asList(new Long[]{}),
+ 0L);
+ testAggregateCountReduce(
+ 2,
+ Arrays.asList(new Long[]{0L}),
+ 0L);
+ testAggregateCountReduce(
+ 2,
+ Arrays.asList(new Long[]{0L,0L}),
+ 0L);
+ testAggregateCountReduce(
+ 2,
+ Arrays.asList(new Long[]{0L,1L,0L}),
+ 1L);
+ testAggregateCountReduce(
+ 2,
+ Arrays.asList(new Long[]{13L,0L,7L,19L}),
+ 39L);
+ }
+
+ @Test
public void testCountDecimal() throws HiveException {
testAggregateDecimal(
"Decimal",
@@ -1210,7 +1235,7 @@ public class TestVectorGroupByOperator {
"count",
2,
Arrays.asList(new Long[]{}),
- null);
+ 0L);
}
@Test
@@ -2027,6 +2052,17 @@ public class TestVectorGroupByOperator {
testAggregateCountStarIterable (fdr, expected);
}
+ public void testAggregateCountReduce (
+ int batchSize,
+ Iterable<Long> values,
+ Object expected) throws HiveException {
+
+ @SuppressWarnings("unchecked")
+ FakeVectorRowBatchFromLongIterables fdr = new FakeVectorRowBatchFromLongIterables(batchSize,
+ values);
+ testAggregateCountReduceIterable (fdr, expected);
+ }
+
public static interface Validator {
void validate (String key, Object expected, Object result);
@@ -2223,6 +2259,37 @@ public class TestVectorGroupByOperator {
validator.validate("_total", expected, result);
}
+ public void testAggregateCountReduceIterable (
+ Iterable<VectorizedRowBatch> data,
+ Object expected) throws HiveException {
+ Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
+ mapColumnNames.put("A", 0);
+ VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);
+
+ GroupByDesc desc = buildGroupByDescType(ctx, "count", "A", TypeInfoFactory.longTypeInfo);
+ VectorGroupByDesc vectorDesc = desc.getVectorDesc();
+ vectorDesc.setIsReduce(true);
+
+ VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+
+ FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+ vgo.initialize(null, null);
+
+ for (VectorizedRowBatch unit: data) {
+ vgo.processOp(unit, 0);
+ }
+ vgo.close(false);
+
+ List<Object> outBatchList = out.getCapturedRows();
+ assertNotNull(outBatchList);
+ assertEquals(1, outBatchList.size());
+
+ Object result = outBatchList.get(0);
+
+ Validator validator = getValidator("count");
+ validator.validate("_total", expected, result);
+ }
+
public void testAggregateStringIterable (
String aggregateName,
Iterable<VectorizedRowBatch> data,
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/vectorization_short_regress.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/vectorization_short_regress.q?rev=1626058&r1=1626057&r2=1626058&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/vectorization_short_regress.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/vectorization_short_regress.q Thu Sep 18 19:39:27 2014
@@ -850,3 +850,52 @@ WHERE (((cboolean1 IS NOT NULL))
GROUP BY cboolean1
ORDER BY cboolean1;
+-- These tests verify COUNT on empty or null columns work correctly.
+create table test_count(i int) stored as orc;
+
+explain
+select count(*) from test_count;
+
+select count(*) from test_count;
+
+explain
+select count(i) from test_count;
+
+select count(i) from test_count;
+
+create table alltypesnull like alltypesorc;
+alter table alltypesnull set fileformat textfile;
+
+insert into table alltypesnull select null, null, null, null, null, null, null, null, null, null, null, null from alltypesorc;
+
+create table alltypesnullorc stored as orc as select * from alltypesnull;
+
+explain
+select count(*) from alltypesnullorc;
+
+select count(*) from alltypesnullorc;
+
+explain
+select count(ctinyint) from alltypesnullorc;
+
+select count(ctinyint) from alltypesnullorc;
+
+explain
+select count(cint) from alltypesnullorc;
+
+select count(cint) from alltypesnullorc;
+
+explain
+select count(cfloat) from alltypesnullorc;
+
+select count(cfloat) from alltypesnullorc;
+
+explain
+select count(cstring1) from alltypesnullorc;
+
+select count(cstring1) from alltypesnullorc;
+
+explain
+select count(cboolean1) from alltypesnullorc;
+
+select count(cboolean1) from alltypesnullorc;