Posted to commits@hive.apache.org by xu...@apache.org on 2013/11/17 05:39:20 UTC
svn commit: r1542643 - in /hive/trunk:
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/
Author: xuefu
Date: Sun Nov 17 04:39:19 2013
New Revision: 1542643
URL: http://svn.apache.org/r1542643
Log:
HIVE-5564: Need to accommodate table decimal columns that were defined prior to HIVE-3976 (Reviewed by Brock)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/test/queries/clientpositive/decimal_1.q
hive/trunk/ql/src/test/results/clientpositive/decimal_1.q.out
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1542643&r1=1542642&r2=1542643&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sun Nov 17 04:39:19 2013
@@ -54,6 +54,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -161,6 +162,7 @@ import org.apache.hadoop.hive.serde2.Met
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.IOUtils;
@@ -2840,6 +2842,8 @@ public class DDLTask extends Task<DDLWor
cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
}
+ fixDecimalColumnTypeName(cols);
+
formatter.describeTable(outStream, colPath, tableName, tbl, part, cols,
descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty());
@@ -2856,6 +2860,22 @@ public class DDLTask extends Task<DDLWor
return 0;
}
+ /**
+ * Fix the type name of a decimal column that was defined without precision/scale. This makes
+ * describe table show "decimal(10,0)" instead of "decimal" even when the type stored
+ * in the metastore is "decimal", which is possible with previous Hive versions.
+ *
+ * @param cols columns whose type names are to be fixed
+ */
+ private static void fixDecimalColumnTypeName(List<FieldSchema> cols) {
+ for (FieldSchema col : cols) {
+ if (serdeConstants.DECIMAL_TYPE_NAME.equals(col.getType())) {
+ col.setType(DecimalTypeInfo.getQualifiedName(HiveDecimal.DEFAULT_PRECISION,
+ HiveDecimal.DEFAULT_SCALE));
+ }
+ }
+ }
+
public static void writeGrantInfo(DataOutput outStream,
PrincipalType principalType, String principalName, String dbName,
String tableName, String partName, String columnName,
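The effect of the new fixDecimalColumnTypeName step can be illustrated with a small, self-contained sketch. It is plain Java rather than the Hive classes above: the literal "decimal" stands in for serdeConstants.DECIMAL_TYPE_NAME, and the constants 10 and 0 stand in for HiveDecimal.DEFAULT_PRECISION and HiveDecimal.DEFAULT_SCALE. A column whose stored type name is the bare, pre-HIVE-3976 "decimal" is reported by describe as decimal(10,0); already-qualified types pass through unchanged.

import java.util.LinkedHashMap;
import java.util.Map;

public class DescribeDecimalSketch {
  // Stand-ins for HiveDecimal.DEFAULT_PRECISION / HiveDecimal.DEFAULT_SCALE.
  private static final int DEFAULT_PRECISION = 10;
  private static final int DEFAULT_SCALE = 0;

  // Mirrors fixDecimalColumnTypeName: a bare "decimal" type name becomes "decimal(10,0)".
  static String fixTypeName(String typeName) {
    if ("decimal".equals(typeName)) { // serdeConstants.DECIMAL_TYPE_NAME
      return "decimal(" + DEFAULT_PRECISION + "," + DEFAULT_SCALE + ")";
    }
    return typeName; // types that already carry precision/scale are left alone
  }

  public static void main(String[] args) {
    Map<String, String> cols = new LinkedHashMap<>();
    cols.put("t", "decimal(4,2)"); // defined with explicit precision/scale
    cols.put("u", "decimal(5,0)"); // defined as decimal(5); shown as decimal(5,0) in the test output
    cols.put("v", "decimal");      // legacy column stored without precision/scale
    cols.forEach((name, type) -> System.out.println(name + "\t" + fixTypeName(type)));
    // Prints, matching the new desc decimal_1 block in decimal_1.q.out below:
    // t    decimal(4,2)
    // u    decimal(5,0)
    // v    decimal(10,0)
  }
}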
Modified: hive/trunk/ql/src/test/queries/clientpositive/decimal_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/decimal_1.q?rev=1542643&r1=1542642&r2=1542643&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/decimal_1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/decimal_1.q Sun Nov 17 04:39:19 2013
@@ -1,12 +1,14 @@
set hive.fetch.task.conversion=more;
-drop table decimal_1;
+drop table if exists decimal_1;
-create table decimal_1 (t decimal(4,2));
+create table decimal_1 (t decimal(4,2), u decimal(5), v decimal);
alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
+desc decimal_1;
+
insert overwrite table decimal_1
- select cast('17.29' as decimal(4,2)) from src tablesample (1 rows);
+ select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows);
select cast(t as boolean) from decimal_1;
select cast(t as tinyint) from decimal_1;
select cast(t as smallint) from decimal_1;
Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_1.q.out?rev=1542643&r1=1542642&r2=1542643&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_1.q.out Sun Nov 17 04:39:19 2013
@@ -1,10 +1,10 @@
-PREHOOK: query: drop table decimal_1
+PREHOOK: query: drop table if exists decimal_1
PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table decimal_1
+POSTHOOK: query: drop table if exists decimal_1
POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table decimal_1 (t decimal(4,2))
+PREHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal)
PREHOOK: type: CREATETABLE
-POSTHOOK: query: create table decimal_1 (t decimal(4,2))
+POSTHOOK: query: create table decimal_1 (t decimal(4,2), u decimal(5), v decimal)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@decimal_1
PREHOOK: query: alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
@@ -15,17 +15,26 @@ POSTHOOK: query: alter table decimal_1 s
POSTHOOK: type: ALTERTABLE_SERIALIZER
POSTHOOK: Input: default@decimal_1
POSTHOOK: Output: default@decimal_1
+PREHOOK: query: desc decimal_1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc decimal_1
+POSTHOOK: type: DESCTABLE
+t decimal(4,2) from deserializer
+u decimal(5,0) from deserializer
+v decimal(10,0) from deserializer
PREHOOK: query: insert overwrite table decimal_1
- select cast('17.29' as decimal(4,2)) from src tablesample (1 rows)
+ select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@decimal_1
POSTHOOK: query: insert overwrite table decimal_1
- select cast('17.29' as decimal(4,2)) from src tablesample (1 rows)
+ select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@decimal_1
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
PREHOOK: query: select cast(t as boolean) from decimal_1
PREHOOK: type: QUERY
PREHOOK: Input: default@decimal_1
@@ -35,6 +44,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
true
PREHOOK: query: select cast(t as tinyint) from decimal_1
PREHOOK: type: QUERY
@@ -45,6 +56,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17
PREHOOK: query: select cast(t as smallint) from decimal_1
PREHOOK: type: QUERY
@@ -55,6 +68,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17
PREHOOK: query: select cast(t as int) from decimal_1
PREHOOK: type: QUERY
@@ -65,6 +80,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17
PREHOOK: query: select cast(t as bigint) from decimal_1
PREHOOK: type: QUERY
@@ -75,6 +92,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17
PREHOOK: query: select cast(t as float) from decimal_1
PREHOOK: type: QUERY
@@ -85,6 +104,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17.29
PREHOOK: query: select cast(t as double) from decimal_1
PREHOOK: type: QUERY
@@ -95,6 +116,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17.29
PREHOOK: query: select cast(t as string) from decimal_1
PREHOOK: type: QUERY
@@ -105,6 +128,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
17.29
PREHOOK: query: select cast(t as timestamp) from decimal_1
PREHOOK: type: QUERY
@@ -115,6 +140,8 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
1969-12-31 16:00:17.29
PREHOOK: query: drop table decimal_1
PREHOOK: type: DROPTABLE
@@ -125,3 +152,5 @@ POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@decimal_1
POSTHOOK: Output: default@decimal_1
POSTHOOK: Lineage: decimal_1.t EXPRESSION []
+POSTHOOK: Lineage: decimal_1.u EXPRESSION []
+POSTHOOK: Lineage: decimal_1.v EXPRESSION []
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java?rev=1542643&r1=1542642&r2=1542643&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java Sun Nov 17 04:39:19 2013
@@ -29,7 +29,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -395,7 +395,6 @@ public final class TypeInfoUtils {
PrimitiveTypeEntry typeEntry =
PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN ) {
- String qualifiedTypeName = typeEntry.typeName;
String[] params = parseParams();
switch (typeEntry.primitiveCategory) {
case CHAR:
@@ -405,40 +404,41 @@ public final class TypeInfoUtils {
+ " type is specified without length: " + typeInfoString);
}
+ int length = 1;
if (params.length == 1) {
- int length = Integer.valueOf(params[0]);
+ length = Integer.valueOf(params[0]);
if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
BaseCharUtils.validateVarcharParameter(length);
+ return TypeInfoFactory.getVarcharTypeInfo(length);
} else {
BaseCharUtils.validateCharParameter(length);
+ return TypeInfoFactory.getCharTypeInfo(length);
}
- qualifiedTypeName = BaseCharTypeInfo.getQualifiedName(typeEntry.typeName, length);
} else if (params.length > 1) {
throw new IllegalArgumentException(
"Type " + typeEntry.typeName+ " only takes one parameter, but " +
params.length + " is seen");
}
-
- break;
case DECIMAL:
+ int precision = HiveDecimal.DEFAULT_PRECISION;
+ int scale = HiveDecimal.DEFAULT_SCALE;
if (params == null || params.length == 0) {
- throw new IllegalArgumentException( "Decimal type is specified without length: " + typeInfoString);
- }
-
- if (params.length == 2) {
- int precision = Integer.valueOf(params[0]);
- int scale = Integer.valueOf(params[1]);
+ // It's possible that old metadata still refers to "decimal" as a column type w/o
+ // precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
+ } else if (params.length == 2) {
+ // New metadata always has two parameters.
+ precision = Integer.valueOf(params[0]);
+ scale = Integer.valueOf(params[1]);
HiveDecimalUtils.validateParameter(precision, scale);
- qualifiedTypeName = DecimalTypeInfo.getQualifiedName(precision, scale);
- } else if (params.length > 1) {
- throw new IllegalArgumentException("Type varchar only takes one parameter, but " +
+ } else if (params.length > 2) {
+ throw new IllegalArgumentException("Type decimal only takes two parameter, but " +
params.length + " is seen");
}
- break;
+ return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
+ default:
+ return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName);
}
-
- return TypeInfoFactory.getPrimitiveTypeInfo(qualifiedTypeName);
}
// Is this a list type?
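The rewritten DECIMAL branch above no longer rejects an unparameterized type name; it falls back to the defaults instead. A rough, self-contained sketch of that dispatch, again using 10 and 0 in place of HiveDecimal.DEFAULT_PRECISION and HiveDecimal.DEFAULT_SCALE, and with the validateParameter and TypeInfoFactory.getDecimalTypeInfo calls only indicated in comments:

import java.util.Arrays;

public class DecimalParamsSketch {
  private static final int DEFAULT_PRECISION = 10; // HiveDecimal.DEFAULT_PRECISION
  private static final int DEFAULT_SCALE = 0;      // HiveDecimal.DEFAULT_SCALE

  // Mirrors the new DECIMAL case: returns {precision, scale} for the parsed type parameters.
  static int[] decimalParams(String[] params) {
    int precision = DEFAULT_PRECISION;
    int scale = DEFAULT_SCALE;
    if (params == null || params.length == 0) {
      // Old metadata may store just "decimal"; keep the (10,0) defaults.
    } else if (params.length == 2) {
      // New metadata carries both parameters, e.g. "decimal(4,2)".
      precision = Integer.parseInt(params[0]);
      scale = Integer.parseInt(params[1]);
      // HiveDecimalUtils.validateParameter(precision, scale) runs here in the real code.
    } else if (params.length > 2) {
      throw new IllegalArgumentException(
          "Type decimal only takes two parameters, but " + params.length + " is seen");
    }
    // The real code then returns TypeInfoFactory.getDecimalTypeInfo(precision, scale).
    return new int[] {precision, scale};
  }

  public static void main(String[] args) {
    System.out.println(Arrays.toString(decimalParams(null)));                    // [10, 0]
    System.out.println(Arrays.toString(decimalParams(new String[] {"4", "2"}))); // [4, 2]
  }
}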