You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jd...@apache.org on 2016/05/03 22:31:43 UTC

[30/45] hive git commit: HIVE-13178: Enhance ORC Schema Evolution to handle more standard data type conversions (Matt McCline, reviewed by Prasanth Jayachandran)

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q b/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
index 1028777..b20f7e8 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vec_mapwork_table.q
@@ -1,11 +1,11 @@
 set hive.explain.user=true;
 set hive.cli.print.header=true;
 SET hive.exec.schema.evolution=true;
-SET hive.vectorized.execution.enabled=true;
 SET hive.vectorized.use.vectorized.input.format=false;
 SET hive.vectorized.use.vector.serde.deserialize=true;
 SET hive.vectorized.use.row.serde.deserialize=false;
 set hive.fetch.task.conversion=none;
+SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
index 086ebd2..c54ed91 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part.q
@@ -2,11 +2,11 @@ set hive.explain.user=true;
 set hive.mapred.mode=nonstrict;
 set hive.cli.print.header=true;
 SET hive.exec.schema.evolution=true;
-SET hive.vectorized.execution.enabled=true;
 SET hive.vectorized.use.vectorized.input.format=false;
 SET hive.vectorized.use.vector.serde.deserialize=false;
 SET hive.vectorized.use.row.serde.deserialize=true;
 set hive.fetch.task.conversion=none;
+SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
index 2553527..7737abf 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_complex.q
@@ -2,11 +2,11 @@ set hive.explain.user=true;
 set hive.mapred.mode=nonstrict;
 set hive.cli.print.header=true;
 SET hive.exec.schema.evolution=true;
-SET hive.vectorized.execution.enabled=true;
 SET hive.vectorized.use.vectorized.input.format=false;
 SET hive.vectorized.use.vector.serde.deserialize=false;
 SET hive.vectorized.use.row.serde.deserialize=true;
 set hive.fetch.task.conversion=none;
+SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=false;
 set hive.default.fileformat=textfile;

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
index f2cada2..09e544a 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_part_all_primitive.q
@@ -2,11 +2,11 @@ set hive.explain.user=true;
 set hive.mapred.mode=nonstrict;
 set hive.cli.print.header=true;
 SET hive.exec.schema.evolution=true;
-SET hive.vectorized.execution.enabled=true;
 SET hive.vectorized.use.vectorized.input.format=false;
 SET hive.vectorized.use.vector.serde.deserialize=false;
 SET hive.vectorized.use.row.serde.deserialize=true;
 set hive.fetch.task.conversion=none;
+SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=false;
 set hive.default.fileformat=textfile;

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
index de0b7ba..8871623 100644
--- a/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
+++ b/ql/src/test/queries/clientpositive/schema_evol_text_vecrow_mapwork_table.q
@@ -1,11 +1,11 @@
 set hive.explain.user=true;
 set hive.cli.print.header=true;
 SET hive.exec.schema.evolution=true;
-SET hive.vectorized.execution.enabled=true;
 SET hive.vectorized.use.vectorized.input.format=false;
 SET hive.vectorized.use.vector.serde.deserialize=false;
 SET hive.vectorized.use.row.serde.deserialize=true;
 set hive.fetch.task.conversion=none;
+SET hive.vectorized.execution.enabled=true;
 set hive.exec.dynamic.partition.mode=nonstrict;
 set hive.metastore.disallow.incompatible.col.type.changes=true;
 set hive.default.fileformat=textfile;

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_replace_columns2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_replace_columns2.q.out b/ql/src/test/results/clientnegative/orc_replace_columns2.q.out
index d581c79..83b55f9 100644
--- a/ql/src/test/results/clientnegative/orc_replace_columns2.q.out
+++ b/ql/src/test/results/clientnegative/orc_replace_columns2.q.out
@@ -1,13 +1,18 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+PREHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key tinyint, val string) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+POSTHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key tinyint, val string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
-PREHOOK: query: alter table src_orc replace columns (k smallint, val string)
+PREHOOK: query: alter table src_orc replace columns (k smallint, val int)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from tinyint to smallint) for column k. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+val

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out b/ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out
index 41df688..2ef833d 100644
--- a/ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out
+++ b/ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out
@@ -1,13 +1,18 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
-PREHOOK: query: alter table src_orc replace columns (k smallint, val string)
+PREHOOK: query: alter table src_orc replace columns (k smallint, val int)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from tinyint to smallint) for column k. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+val

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_replace_columns3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_replace_columns3.q.out b/ql/src/test/results/clientnegative/orc_replace_columns3.q.out
index 2deb3cb..54408a0 100644
--- a/ql/src/test/results/clientnegative/orc_replace_columns3.q.out
+++ b/ql/src/test/results/clientnegative/orc_replace_columns3.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key smallint, val string) stored as orc
+PREHOOK: query: -- Currently, smallint to tinyint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key smallint, val string) stored as orc
+POSTHOOK: query: -- Currently, smallint to tinyint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -18,4 +22,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from smallint to tinyint) for column z. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+z

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out b/ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out
index 3fb1e32..3aefca0 100644
--- a/ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out
+++ b/ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: -- Currently, smallint to tinyint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: -- Currently, smallint to tinyint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -18,4 +22,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Replacing columns with unsupported type conversion (from smallint to tinyint) for column z. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+z

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion1.q.out b/ql/src/test/results/clientnegative/orc_type_promotion1.q.out
index aa3b64a..080cfd0 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion1.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion1.q.out
@@ -1,13 +1,18 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+PREHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key string, val string) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+POSTHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key string, val string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
-PREHOOK: query: alter table src_orc change key key float
+PREHOOK: query: alter table src_orc change key key int
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to float is not supported for column key. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+key

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out b/ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out
index 030ca35..f3b1ae8 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out
@@ -1,13 +1,18 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key string, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: -- Currently, string to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key string, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
-PREHOOK: query: alter table src_orc change key key float
+PREHOOK: query: alter table src_orc change key key int
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to float is not supported for column key. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+key

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion2.q.out b/ql/src/test/results/clientnegative/orc_type_promotion2.q.out
index 44ac44f..4205901 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion2.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion2.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key smallint, val string) stored as orc
+PREHOOK: query: -- Currently, bigint to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key smallint, val string) stored as orc
+POSTHOOK: query: -- Currently, bigint to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -62,8 +66,9 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@src_orc
 key                 	bigint              	                    
 val                 	string              	                    
-PREHOOK: query: alter table src_orc change val val char(100)
+PREHOOK: query: alter table src_orc change val val int
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type string to char(100) is not supported for column val. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+val

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out b/ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out
index 77e2e8d..9129782 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: -- Currently, bigint to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: -- Currently, bigint to int conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key smallint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -62,8 +66,9 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@src_orc
 key                 	bigint              	                    
 val                 	string              	                    
-PREHOOK: query: alter table src_orc change val val char(100)
+PREHOOK: query: alter table src_orc change val val int
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type string to char(100) is not supported for column val. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+val

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion3.q.out b/ql/src/test/results/clientnegative/orc_type_promotion3.q.out
index b4630d6..1872803 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion3.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion3.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+PREHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) stored as orc
+POSTHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) stored as orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -10,4 +14,5 @@ PREHOOK: query: alter table src_orc change key key smallint
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to smallint is not supported for column key. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+key

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q b/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q
new file mode 100644
index 0000000..bd33c6c
--- /dev/null
+++ b/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q
@@ -0,0 +1,18 @@
+PREHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_orc
+POSTHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_orc
+PREHOOK: query: alter table src_orc change key key smallint
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@src_orc
+PREHOOK: Output: default@src_orc
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+key

http://git-wip-us.apache.org/repos/asf/hive/blob/a16058e1/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out b/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out
index 19400ce..bd33c6c 100644
--- a/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out
+++ b/ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out
@@ -1,8 +1,12 @@
-PREHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@src_orc
-POSTHOOK: query: create table src_orc (key tinyint, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: -- Currently, double to smallint conversion is not supported because it isn't in the lossless
+-- TypeInfoUtils.implicitConvertible conversions.
+create table src_orc (key double, val string) clustered by (val) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_orc
@@ -10,4 +14,5 @@ PREHOOK: query: alter table src_orc change key key smallint
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@src_orc
 PREHOOK: Output: default@src_orc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Changing from type tinyint to smallint is not supported for column key. SerDe may be incompatible
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+key