You are viewing a plain-text version of this content; the canonical link for it is available in the original mailing-list archive.
Posted to commits@hive.apache.org by cw...@apache.org on 2011/08/04 11:35:03 UTC
svn commit: r1153809 - in /hive/trunk/jdbc/src:
java/org/apache/hadoop/hive/jdbc/ test/org/apache/hadoop/hive/jdbc/
Author: cws
Date: Thu Aug 4 09:35:02 2011
New Revision: 1153809
URL: http://svn.apache.org/viewvc?rev=1153809&view=rev
Log:
HIVE-1631. JDBC driver returns wrong precision, scale, or column size for some data types (Patrick Hunt via cws)
Added:
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (with props)
Modified:
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java?rev=1153809&r1=1153808&r2=1153809&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java Thu Aug 4 09:35:02 2011
@@ -215,10 +215,10 @@ public class HiveDatabaseMetaData implem
, "SQL_DATETIME_SUB", "CHAR_OCTET_LENGTH", "ORDINAL_POSITION"
, "IS_NULLABLE", "SCOPE_CATLOG", "SCOPE_SCHEMA", "SCOPE_TABLE"
, "SOURCE_DATA_TYPE")
- , Arrays.asList("STRING", "STRING", "STRING", "STRING", "I32", "STRING"
- , "I32", "I32", "I32", "I32", "I32", "STRING"
- , "STRING", "I32", "I32", "I32", "I32"
- , "STRING", "STRING", "STRING", "STRING", "I32")
+ , Arrays.asList("STRING", "STRING", "STRING", "STRING", "INT", "STRING"
+ , "INT", "INT", "INT", "INT", "INT", "STRING"
+ , "STRING", "INT", "INT", "INT", "INT"
+ , "STRING", "STRING", "STRING", "STRING", "INT")
, columns) {
private int cnt = 0;
@@ -684,7 +684,7 @@ public class HiveDatabaseMetaData implem
return new HiveMetaDataResultSet(
Arrays.asList("TYPE_CAT", "TYPE_SCHEM", "TYPE_NAME", "CLASS_NAME", "DATA_TYPE"
, "REMARKS", "BASE_TYPE")
- , Arrays.asList("STRING", "STRING", "STRING", "STRING", "I32", "STRING", "I32")
+ , Arrays.asList("STRING", "STRING", "STRING", "STRING", "INT", "STRING", "INT")
, null) {
public boolean next() throws SQLException {
Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=1153809&r1=1153808&r2=1153809&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Thu Aug 4 09:35:02 2011
@@ -30,8 +30,8 @@ import org.apache.hadoop.hive.serde.Cons
*
*/
public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
- private List<String> columnNames;
- private List<String> columnTypes;
+ private final List<String> columnNames;
+ private final List<String> columnTypes;
public HiveResultSetMetaData(List<String> columnNames,
List<String> columnTypes) {
@@ -52,21 +52,9 @@ public class HiveResultSetMetaData imple
}
public int getColumnDisplaySize(int column) throws SQLException {
- // taking a stab at appropriate values
- switch (getColumnType(column)) {
- case Types.VARCHAR:
- case Types.BIGINT:
- return 32;
- case Types.TINYINT:
- return 2;
- case Types.BOOLEAN:
- return 8;
- case Types.DOUBLE:
- case Types.INTEGER:
- return 16;
- default:
- return 32;
- }
+ int columnType = getColumnType(column);
+
+ return JdbcColumn.columnSize(columnType);
}
public String getColumnLabel(int column) throws SQLException {
@@ -91,43 +79,9 @@ public class HiveResultSetMetaData imple
String type = columnTypes.get(column - 1);
// we need to convert the thrift type to the SQL type
- return hiveTypeToSqlType(type);
- }
-
- /**
- * Convert hive types to sql types.
- * @param type
- * @return Integer java.sql.Types values
- * @throws SQLException
- */
- public static int hiveTypeToSqlType(String type) throws SQLException {
- if ("string".equalsIgnoreCase(type)) {
- return Types.VARCHAR;
- } else if ("float".equalsIgnoreCase(type)) {
- return Types.FLOAT;
- } else if ("double".equalsIgnoreCase(type)) {
- return Types.DOUBLE;
- } else if ("boolean".equalsIgnoreCase(type)) {
- return Types.BOOLEAN;
- } else if ("tinyint".equalsIgnoreCase(type)) {
- return Types.TINYINT;
- } else if ("smallint".equalsIgnoreCase(type)) {
- return Types.SMALLINT;
- } else if ("int".equalsIgnoreCase(type)) {
- return Types.INTEGER;
- } else if ("bigint".equalsIgnoreCase(type)) {
- return Types.BIGINT;
- } else if (type.startsWith("map<")) {
- return Types.VARCHAR;
- } else if (type.startsWith("array<")) {
- return Types.VARCHAR;
- } else if (type.startsWith("struct<")) {
- return Types.VARCHAR;
- }
- throw new SQLException("Unrecognized column type: " + type);
+ return Utils.hiveTypeToSqlType(type);
}
-
public String getColumnTypeName(int column) throws SQLException {
if (columnTypes == null) {
throw new SQLException(
@@ -169,19 +123,30 @@ public class HiveResultSetMetaData imple
}
public int getPrecision(int column) throws SQLException {
- if (Types.DOUBLE == getColumnType(column)) {
- return -1; // Do we have a precision limit?
- }
+ int columnType = getColumnType(column);
- return 0;
+ return JdbcColumn.columnPrecision(columnType);
}
public int getScale(int column) throws SQLException {
- if (Types.DOUBLE == getColumnType(column)) {
- return -1; // Do we have a scale limit?
- }
+ int columnType = getColumnType(column);
- return 0;
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ case Types.VARCHAR:
+ case Types.TINYINT:
+ case Types.SMALLINT:
+ case Types.INTEGER:
+ case Types.BIGINT:
+ return 0;
+ case Types.FLOAT:
+ return 7;
+ case Types.DOUBLE:
+ return 15;
+ default:
+ throw new SQLException("Invalid column value: " + column);
+ }
}
public String getSchemaName(int column) throws SQLException {
Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java?rev=1153809&r1=1153808&r2=1153809&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java Thu Aug 4 09:35:02 2011
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.jdbc;
import java.sql.SQLException;
+import java.sql.Types;
/**
* Column metadata.
@@ -58,29 +59,61 @@ public class JdbcColumn {
}
public Integer getSqlType() throws SQLException {
- return HiveResultSetMetaData.hiveTypeToSqlType(type);
+ return Utils.hiveTypeToSqlType(type);
}
- public Integer getColumnSize() {
- if (type.equalsIgnoreCase("string")) {
- return Integer.MAX_VALUE;
- } else if (type.equalsIgnoreCase("tinyint")) {
+ static int columnSize(int columnType) throws SQLException {
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ return columnPrecision(columnType);
+ case Types.VARCHAR:
+ return Integer.MAX_VALUE; // hive has no max limit for strings
+ case Types.TINYINT:
+ case Types.SMALLINT:
+ case Types.INTEGER:
+ case Types.BIGINT:
+ return columnPrecision(columnType) + 1; // allow +/-
+
+ // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
+ case Types.FLOAT:
+ return 24; // e.g. -(17#).e-###
+ // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Double.MAX_EXPONENT
+ case Types.DOUBLE:
+ return 25; // e.g. -(17#).e-####
+ default:
+ throw new SQLException("Invalid column type: " + columnType);
+ }
+ }
+
+ static int columnPrecision(int columnType) throws SQLException {
+ // according to hiveTypeToSqlType possible options are:
+ switch(columnType) {
+ case Types.BOOLEAN:
+ return 1;
+ case Types.VARCHAR:
+ return Integer.MAX_VALUE; // hive has no max limit for strings
+ case Types.TINYINT:
return 3;
- } else if (type.equalsIgnoreCase("smallint")) {
+ case Types.SMALLINT:
return 5;
- } else if (type.equalsIgnoreCase("int")) {
+ case Types.INTEGER:
return 10;
- } else if (type.equalsIgnoreCase("bigint")) {
+ case Types.BIGINT:
return 19;
- } else if (type.equalsIgnoreCase("float")) {
- return 12;
- } else if (type.equalsIgnoreCase("double")) {
- return 22;
- } else { // anything else including boolean is null
- return null;
+ case Types.FLOAT:
+ return 7;
+ case Types.DOUBLE:
+ return 15;
+ default:
+ throw new SQLException("Invalid column type: " + columnType);
}
}
+ public Integer getColumnSize() throws SQLException {
+ return columnSize(Utils.hiveTypeToSqlType(type));
+ }
+
public Integer getNumPrecRadix() {
if (type.equalsIgnoreCase("tinyint")) {
return 10;
Added: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java?rev=1153809&view=auto
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java (added)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java Thu Aug 4 09:35:02 2011
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.jdbc;
+
+import java.sql.SQLException;
+import java.sql.Types;
+
+public class Utils {
+
+ /**
+ * Convert hive types to sql types.
+ * @param type
+ * @return Integer java.sql.Types values
+ * @throws SQLException
+ */
+ public static int hiveTypeToSqlType(String type) throws SQLException {
+ if ("string".equalsIgnoreCase(type)) {
+ return Types.VARCHAR;
+ } else if ("float".equalsIgnoreCase(type)) {
+ return Types.FLOAT;
+ } else if ("double".equalsIgnoreCase(type)) {
+ return Types.DOUBLE;
+ } else if ("boolean".equalsIgnoreCase(type)) {
+ return Types.BOOLEAN;
+ } else if ("tinyint".equalsIgnoreCase(type)) {
+ return Types.TINYINT;
+ } else if ("smallint".equalsIgnoreCase(type)) {
+ return Types.SMALLINT;
+ } else if ("int".equalsIgnoreCase(type)) {
+ return Types.INTEGER;
+ } else if ("bigint".equalsIgnoreCase(type)) {
+ return Types.BIGINT;
+ } else if (type.startsWith("map<")) {
+ return Types.VARCHAR;
+ } else if (type.startsWith("array<")) {
+ return Types.VARCHAR;
+ } else if (type.startsWith("struct<")) {
+ return Types.VARCHAR;
+ }
+ throw new SQLException("Unrecognized column type: " + type);
+ }
+
+}
Propchange: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
------------------------------------------------------------------------------
svn:eol-style = native
Modified: hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1153809&r1=1153808&r2=1153809&view=diff
==============================================================================
--- hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Thu Aug 4 09:35:02 2011
@@ -642,6 +642,11 @@ public class TestJdbcDriver extends Test
for (String[] checkPattern: tests.keySet()) {
ResultSet rs = (ResultSet)con.getMetaData().getColumns(null, null
, checkPattern[0], checkPattern[1]);
+
+ // validate the metadata for the getColumns result set
+ ResultSetMetaData rsmd = rs.getMetaData();
+ assertEquals("TABLE_CAT", rsmd.getColumnName(1));
+
int cnt = 0;
while (rs.next()) {
String columnname = rs.getString("COLUMN_NAME");
@@ -665,6 +670,24 @@ public class TestJdbcDriver extends Test
}
}
+ /**
+ * Validate the Metadata for the result set of a metadata getColumns call.
+ */
+ public void testMetaDataGetColumnsMetaData() throws SQLException {
+ ResultSet rs = (ResultSet)con.getMetaData().getColumns(null, null
+ , "testhivejdbcdriver\\_table", null);
+
+ ResultSetMetaData rsmd = rs.getMetaData();
+
+ assertEquals("TABLE_CAT", rsmd.getColumnName(1));
+ assertEquals(Types.VARCHAR, rsmd.getColumnType(1));
+ assertEquals(Integer.MAX_VALUE, rsmd.getColumnDisplaySize(1));
+
+ assertEquals("ORDINAL_POSITION", rsmd.getColumnName(17));
+ assertEquals(Types.INTEGER, rsmd.getColumnType(17));
+ assertEquals(11, rsmd.getColumnDisplaySize(17));
+ }
+
public void testConversionsBaseResultSet() throws SQLException {
ResultSet rs = new HiveMetaDataResultSet(Arrays.asList("key")
, Arrays.asList("long")
@@ -793,30 +816,32 @@ public class TestJdbcDriver extends Test
assertEquals("int", meta.getColumnTypeName(13));
assertEquals("string", meta.getColumnTypeName(14));
- assertEquals(16, meta.getColumnDisplaySize(1));
- assertEquals(8, meta.getColumnDisplaySize(2));
- assertEquals(16, meta.getColumnDisplaySize(3));
- assertEquals(32, meta.getColumnDisplaySize(4));
- assertEquals(32, meta.getColumnDisplaySize(5));
- assertEquals(32, meta.getColumnDisplaySize(6));
- assertEquals(32, meta.getColumnDisplaySize(7));
- assertEquals(32, meta.getColumnDisplaySize(8));
- assertEquals(2, meta.getColumnDisplaySize(9));
- assertEquals(32, meta.getColumnDisplaySize(10));
- assertEquals(32, meta.getColumnDisplaySize(11));
- assertEquals(32, meta.getColumnDisplaySize(12));
- assertEquals(16, meta.getColumnDisplaySize(13));
- assertEquals(32, meta.getColumnDisplaySize(14));
-
+ assertEquals(11, meta.getColumnDisplaySize(1));
+ assertEquals(1, meta.getColumnDisplaySize(2));
+ assertEquals(25, meta.getColumnDisplaySize(3));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(4));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(5));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(6));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(7));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(8));
+ assertEquals(4, meta.getColumnDisplaySize(9));
+ assertEquals(6, meta.getColumnDisplaySize(10));
+ assertEquals(24, meta.getColumnDisplaySize(11));
+ assertEquals(20, meta.getColumnDisplaySize(12));
+ assertEquals(11, meta.getColumnDisplaySize(13));
+ assertEquals(Integer.MAX_VALUE, meta.getColumnDisplaySize(14));
+
+ int[] expectedPrecision = {10, 1, 15, Integer.MAX_VALUE, Integer.MAX_VALUE,
+ Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, 3, 5, 7, 19,
+ 10, Integer.MAX_VALUE};
+ int[] expectedScale = {0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0};
for (int i = 1; i <= meta.getColumnCount(); i++) {
assertFalse(meta.isAutoIncrement(i));
assertFalse(meta.isCurrency(i));
assertEquals(ResultSetMetaData.columnNullable, meta.isNullable(i));
- int expectedPrecision = i == 3 ? -1 : 0;
- int expectedScale = i == 3 ? -1 : 0;
- assertEquals(expectedPrecision, meta.getPrecision(i));
- assertEquals(expectedScale, meta.getScale(i));
+ assertEquals(expectedPrecision[i-1], meta.getPrecision(i));
+ assertEquals(expectedScale[i-1], meta.getScale(i));
}
}