Posted to commits@hive.apache.org by za...@apache.org on 2022/08/02 10:55:06 UTC

[hive] 01/02: HIVE-26350: IndexOutOfBoundsException when generating splits for external JDBC table with partition column (Stamatis Zampetakis reviewed by Krisztian Kasa, Aman Sinha)

This is an automated email from the ASF dual-hosted git repository.

zabetak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a0c500ba668a74be4acafaf76a4b94eb5353268c
Author: Stamatis Zampetakis <za...@gmail.com>
AuthorDate: Wed Jun 22 18:42:56 2022 +0200

    HIVE-26350: IndexOutOfBoundsException when generating splits for external JDBC table with partition column (Stamatis Zampetakis reviewed by Krisztian Kasa, Aman Sinha)
    
    1. Introduce a new API, DatabaseAccessor#getColumnTypes, to:
    * allow fetching column types from the database;
    * align with the code using DatabaseAccessor#getColumnNames.
    2. Use the new API to find the type of the partition column in
    JdbcInputFormat, since the information is not propagated correctly to
    LIST_COLUMN_TYPES, which leads to the IOBE (see the sketch after the
    diffstat below).
    3. Refactor GenericJdbcDatabaseAccessor to avoid duplicating code with
    the introduction of the new API.
    4. Add a test reproducing the IOBE problem, and tests for the new API.
    5. Adapt existing tests to the changes.
    
    Closes #3470
---
 .../apache/hive/storage/jdbc/JdbcInputFormat.java  |  4 +-
 .../hive/storage/jdbc/dao/DatabaseAccessor.java    | 19 ++++++
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java      | 76 +++++++++++++++++++---
 .../hive/storage/jdbc/TestJdbcInputFormat.java     | 25 +++++--
 .../jdbc/dao/TestGenericJdbcDatabaseAccessor.java  | 58 +++++++++++++++++
 jdbc-handler/src/test/resources/test_script.sql    | 35 +++++++++-
 .../jdbc_partition_table_pruned_pcolumn.q          | 23 +++++++
 .../llap/jdbc_partition_table_pruned_pcolumn.q.out | 52 +++++++++++++++
 8 files changed, 273 insertions(+), 19 deletions(-)
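
For context, the failure mode is an index mismatch between two lists that come
from different sources. A minimal standalone sketch of the bug and the fix
(names and values here are illustrative, not the actual Hive objects):

    import java.util.Arrays;
    import java.util.List;

    public class PartitionTypeLookupSketch {
      public static void main(String[] args) {
        // Column names as reported by the external database.
        List<String> columnNames = Arrays.asList("id", "title", "author");
        // Types as read from LIST_COLUMN_TYPES; the partition column "author"
        // was pruned away, so this list is shorter than columnNames.
        List<String> confTypes = Arrays.asList("int", "varchar(20)");

        int idx = columnNames.indexOf("author"); // 2
        try {
          confTypes.get(idx); // index out of range: the HIVE-26350 failure
        } catch (IndexOutOfBoundsException e) {
          System.out.println("IOBE reproduced: " + e);
        }
        // The fix asks the DatabaseAccessor for the types instead; they are
        // derived from the same metadata query as the names, so the two
        // lists always have the same size and order.
      }
    }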

diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index 14c5a777965..ecb7b2ec0cc 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -20,10 +20,8 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
-import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -98,7 +96,7 @@ public class JdbcInputFormat extends HiveInputFormat<LongWritable, MapWritable>
         if (!columnNames.contains(partitionColumn)) {
           throw new IOException("Cannot find partitionColumn:" + partitionColumn + " in " + columnNames);
         }
-        List<TypeInfo> hiveColumnTypesList = TypeInfoUtils.getTypeInfosFromTypeString(job.get(serdeConstants.LIST_COLUMN_TYPES));
+        List<TypeInfo> hiveColumnTypesList = dbAccessor.getColumnTypes(job);
         TypeInfo typeInfo = hiveColumnTypesList.get(columnNames.indexOf(partitionColumn));
         if (!(typeInfo instanceof PrimitiveTypeInfo)) {
           throw new IOException(partitionColumn + " is a complex type, only primitive type can be a partition column");
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
index 654205d1850..11fcfed1939 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessor.java
@@ -18,6 +18,7 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
@@ -29,6 +30,24 @@ public interface DatabaseAccessor {
 
   List<String> getColumnNames(Configuration conf) throws HiveJdbcDatabaseAccessException;
 
+  /**
+   * Returns a list of types for the columns in the specified configuration.
+   *
+   * The type must represent as closely as possible the respective type of the column stored in
+   * the database. Since an exact mapping between database types and Hive types does not exist,
+   * the result is approximate. When it is not possible to derive a type for a given column,
+   * {@link org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory#unknownTypeInfo} is used.
+   *
+   * There is a one-to-one correspondence between the types returned by this method and the column
+   * names obtained with {@link #getColumnNames(Configuration)}.
+   *
+   * Implementors of the method can derive the types by querying the database, exploiting the
+   * state of the accessor, or using the configuration.
+   *
+   * @throws HiveJdbcDatabaseAccessException if an error occurs while accessing the database
+   */
+  List<TypeInfo> getColumnTypes(Configuration conf) throws HiveJdbcDatabaseAccessException;
+
   int getTotalNumberOfRecords(Configuration conf) throws HiveJdbcDatabaseAccessException;
 
   JdbcRecordIterator
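
To make the documented contract concrete, a caller can pair the two lists
positionally. A hypothetical helper (dumpSchema is not part of the patch),
compiled against the interfaces above:

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
    import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;

    public class SchemaDumpSketch {
      // Relies on the one-to-one correspondence between getColumnNames and
      // getColumnTypes documented in the javadoc above.
      static void dumpSchema(DatabaseAccessor accessor, Configuration conf)
          throws HiveJdbcDatabaseAccessException {
        List<String> names = accessor.getColumnNames(conf);
        List<TypeInfo> types = accessor.getColumnTypes(conf);
        for (int i = 0; i < names.size(); i++) {
          System.out.println(names.get(i) + " -> " + types.get(i).getTypeName());
        }
      }
    }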
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index 319337814d7..39dd3eefacd 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
@@ -38,11 +40,13 @@ import javax.sql.DataSource;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -67,9 +71,8 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
   public GenericJdbcDatabaseAccessor() {
   }
 
-
-  @Override
-  public List<String> getColumnNames(Configuration conf) throws HiveJdbcDatabaseAccessException {
+  private <T> List<T> getColumnMetadata(Configuration conf, ColumnMetadataAccessor<T> colAccessor)
+      throws HiveJdbcDatabaseAccessException {
     Connection conn = null;
     PreparedStatement ps = null;
     ResultSet rs = null;
@@ -89,16 +92,15 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
 
       ResultSetMetaData metadata = rs.getMetaData();
       int numColumns = metadata.getColumnCount();
-      List<String> columnNames = new ArrayList<String>(numColumns);
+      List<T> columnMeta = new ArrayList<>(numColumns);
       for (int i = 0; i < numColumns; i++) {
-        columnNames.add(metadata.getColumnName(i + 1));
+        columnMeta.add(colAccessor.get(metadata, i + 1));
       }
 
-      return columnNames;
+      return columnMeta;
     }
     catch (Exception e) {
-      LOGGER.error("Error while trying to get column names.", e);
-      throw new HiveJdbcDatabaseAccessException("Error while trying to get column names: " + e.getMessage(), e);
+      throw new HiveJdbcDatabaseAccessException("Error while trying to get column metadata: " + e.getMessage(), e);
     }
     finally {
       cleanupResources(conn, ps, rs);
@@ -106,6 +108,60 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
 
   }
 
+  @Override
+  public List<String> getColumnNames(Configuration conf) throws HiveJdbcDatabaseAccessException {
+    return getColumnMetadata(conf, ResultSetMetaData::getColumnName);
+  }
+
+  @Override
+  public List<TypeInfo> getColumnTypes(Configuration conf) throws HiveJdbcDatabaseAccessException {
+    return getColumnMetadata(conf, (meta, col) -> {
+      JDBCType type = JDBCType.valueOf(meta.getColumnType(col));
+      int prec = meta.getPrecision(col);
+      int scal = meta.getScale(col);
+      switch (type) {
+      case BIT:
+      case BOOLEAN:
+        return TypeInfoFactory.booleanTypeInfo;
+      case TINYINT:
+        return TypeInfoFactory.byteTypeInfo;
+      case SMALLINT:
+        return TypeInfoFactory.shortTypeInfo;
+      case INTEGER:
+        return TypeInfoFactory.intTypeInfo;
+      case BIGINT:
+        return TypeInfoFactory.longTypeInfo;
+      case CHAR:
+        return TypeInfoFactory.getCharTypeInfo(prec);
+      case VARCHAR:
+      case NVARCHAR:
+      case LONGNVARCHAR:
+      case LONGVARCHAR:
+        return TypeInfoFactory.getVarcharTypeInfo(Math.min(prec, 65535));
+      case DOUBLE:
+        return TypeInfoFactory.doubleTypeInfo;
+      case REAL:
+      case FLOAT:
+        return TypeInfoFactory.floatTypeInfo;
+      case DATE:
+        return TypeInfoFactory.dateTypeInfo;
+      case TIMESTAMP:
+      case TIMESTAMP_WITH_TIMEZONE:
+        return TypeInfoFactory.timestampTypeInfo;
+      case DECIMAL:
+      case NUMERIC:
+        return TypeInfoFactory.getDecimalTypeInfo(Math.min(prec, 38), scal);
+      case ARRAY:
+        // Best effort with the info that we have at the moment
+        return TypeInfoFactory.getListTypeInfo(TypeInfoFactory.unknownTypeInfo);
+      case STRUCT:
+        // Best effort with the info that we have at the moment
+        return TypeInfoFactory.getStructTypeInfo(Collections.emptyList(), Collections.emptyList());
+      default:
+        return TypeInfoFactory.unknownTypeInfo;
+      }
+    });
+  }
 
   protected String getMetaDataQuery(String sql) {
     return addLimitToQuery(sql, 1);
@@ -490,4 +546,8 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
   private static String selectAllFromTable(String tableName) {
     return tableName == null ? null : "select * from " + tableName;
   }
+
+  private interface ColumnMetadataAccessor<T> {
+    T get(ResultSetMetaData metadata, Integer column) throws SQLException;
+  }
 }
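
The refactoring (item 3 in the commit message) hinges on that small functional
interface. The same pattern in isolation, as a standalone sketch outside Hive
(extract and demo are illustrative names):

    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public class ColumnMetadataAccessorSketch {
      // Same shape as the private ColumnMetadataAccessor in the patch.
      interface ColumnMetadataAccessor<T> {
        T get(ResultSetMetaData metadata, Integer column) throws SQLException;
      }

      // One generic loop extracts any per-column attribute; the caller picks
      // the attribute with a lambda or a method reference.
      static <T> List<T> extract(ResultSetMetaData md, ColumnMetadataAccessor<T> f)
          throws SQLException {
        List<T> out = new ArrayList<>(md.getColumnCount());
        for (int i = 1; i <= md.getColumnCount(); i++) {
          out.add(f.get(md, i));
        }
        return out;
      }

      static void demo(ResultSetMetaData md) throws SQLException {
        // Method references adapt cleanly to the interface (int <-> Integer).
        List<String> names = extract(md, ResultSetMetaData::getColumnName);
        List<Integer> jdbcTypes = extract(md, ResultSetMetaData::getColumnType);
        System.out.println(names + " " + jdbcTypes);
      }
    }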
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
index 07102170e2e..ee51a918021 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
@@ -18,6 +18,8 @@ import com.google.common.collect.Lists;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
@@ -32,6 +34,8 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
@@ -100,10 +104,11 @@ public class TestJdbcInputFormat {
     BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.intTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "3");
     conf.set("hive.sql.lowerBound", "1");
@@ -127,10 +132,11 @@ public class TestJdbcInputFormat {
     BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.doubleTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "double");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "3");
     conf.set("hive.sql.lowerBound", "0");
@@ -158,10 +164,11 @@ public class TestJdbcInputFormat {
     BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.getDecimalTypeInfo(10, 5));
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "decimal(10,5)");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "4");
     conf.set("hive.sql.lowerBound", "5");
@@ -190,10 +197,11 @@ public class TestJdbcInputFormat {
     when(mockDatabaseAccessor.getBounds(any(Configuration.class), any(String.class), anyBoolean(), anyBoolean()))
             .thenReturn(new ImmutablePair<String, String>("2010-01-01 00:00:00.000000000", "2018-01-01 " +
             "12:00:00.000000000"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.timestampTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "timestamp");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "2");
     InputSplit[] splits = f.getSplits(conf, -1);
@@ -215,10 +223,11 @@ public class TestJdbcInputFormat {
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
     when(mockDatabaseAccessor.getBounds(any(Configuration.class), any(String.class), anyBoolean(), anyBoolean()))
             .thenReturn(new ImmutablePair<String, String>("2010-01-01", "2018-01-01"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.dateTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "date");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "3");
     InputSplit[] splits = f.getSplits(conf, -1);
@@ -240,10 +249,11 @@ public class TestJdbcInputFormat {
     BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.intTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "5");
     conf.set("hive.sql.lowerBound", "2");
@@ -265,10 +275,11 @@ public class TestJdbcInputFormat {
     BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
+    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.intTypeInfo);
+    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
-    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int");
     conf.set("hive.sql.partitionColumn", "a");
     conf.set("hive.sql.numPartitions", "5");
     conf.set("hive.sql.lowerBound", "1");
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java
index 545a71f8f14..98888ecd045 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/TestGenericJdbcDatabaseAccessor.java
@@ -16,10 +16,15 @@ package org.apache.hive.storage.jdbc.dao;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
+
+import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -42,6 +47,47 @@ public class TestGenericJdbcDatabaseAccessor {
     assertThat(columnNames.get(0), is(equalToIgnoringCase("strategy_id")));
   }
 
+  @Test
+  public void testGetColumnTypes_starQuery_allTypes() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from all_types_table");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+
+    List<TypeInfo> expectedTypes = new ArrayList<>();
+    expectedTypes.add(TypeInfoFactory.getCharTypeInfo(1));
+    expectedTypes.add(TypeInfoFactory.getCharTypeInfo(20));
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.varcharTypeInfo);
+    expectedTypes.add(TypeInfoFactory.getVarcharTypeInfo(1024));
+    expectedTypes.add(TypeInfoFactory.varcharTypeInfo);
+    expectedTypes.add(TypeInfoFactory.booleanTypeInfo);
+    expectedTypes.add(TypeInfoFactory.byteTypeInfo);
+    expectedTypes.add(TypeInfoFactory.shortTypeInfo);
+    expectedTypes.add(TypeInfoFactory.intTypeInfo);
+    expectedTypes.add(TypeInfoFactory.longTypeInfo);
+    expectedTypes.add(TypeInfoFactory.getDecimalTypeInfo(38, 0));
+    expectedTypes.add(TypeInfoFactory.getDecimalTypeInfo(9, 3));
+    expectedTypes.add(TypeInfoFactory.floatTypeInfo);
+    expectedTypes.add(TypeInfoFactory.doubleTypeInfo);
+    expectedTypes.add(TypeInfoFactory.getDecimalTypeInfo(38, 0));
+    expectedTypes.add(TypeInfoFactory.dateTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.timestampTypeInfo);
+    expectedTypes.add(TypeInfoFactory.timestampTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    expectedTypes.add(TypeInfoFactory.getListTypeInfo(TypeInfoFactory.unknownTypeInfo));
+    expectedTypes.add(TypeInfoFactory.unknownTypeInfo);
+    Assert.assertEquals(expectedTypes, accessor.getColumnTypes(conf));
+  }
 
   @Test
   public void testGetColumnNames_fieldListQuery() throws HiveJdbcDatabaseAccessException {
@@ -55,6 +101,18 @@ public class TestGenericJdbcDatabaseAccessor {
     assertThat(columnNames.get(0), is(equalToIgnoringCase("name")));
   }
 
+  @Test
+  public void testGetColumnTypes_fieldListQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select name,referrer from test_strategy");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+
+    List<TypeInfo> expectedTypes = new ArrayList<>(2);
+    expectedTypes.add(TypeInfoFactory.getVarcharTypeInfo(50));
+    expectedTypes.add(TypeInfoFactory.getVarcharTypeInfo(1024));
+    Assert.assertEquals(expectedTypes, accessor.getColumnTypes(conf));
+  }
+
 
   @Test(expected = HiveJdbcDatabaseAccessException.class)
   public void testGetColumnNames_invalidQuery() throws HiveJdbcDatabaseAccessException {
diff --git a/jdbc-handler/src/test/resources/test_script.sql b/jdbc-handler/src/test/resources/test_script.sql
index 5d7f08aa9d0..29a2db7402f 100644
--- a/jdbc-handler/src/test/resources/test_script.sql
+++ b/jdbc-handler/src/test/resources/test_script.sql
@@ -18,4 +18,37 @@ INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, imple
 INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (4,'S4','ddd','jkl',980,NULL,'2012-05-08 15:01:15');
 INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (5,'S5','eee',NULL,NULL,NULL,'2012-05-08 15:01:15');
 
-
+CREATE TABLE IF NOT EXISTS all_types_table (
+    char_col CHAR,
+    char_with_len_col CHAR(20),
+    char_large_col CHARACTER LARGE OBJECT,
+    varchar_col VARCHAR,
+    varchar_with_len_col VARCHAR(1024),
+    varchar_ci_col VARCHAR_IGNORECASE,
+    bool_col BOOLEAN,
+    tiny_col TINYINT,
+    small_col SMALLINT,
+    int_col INTEGER,
+    big_col BIGINT,
+    numeric_col NUMERIC,
+    numeric_with_prec_scale_col NUMERIC(9,3),
+    real_col REAL,
+    double_col DOUBLE PRECISION,
+    float_col DECFLOAT,
+    date_col DATE,
+    time_col TIME,
+    time_zone_col TIME WITH TIME ZONE,
+    timestamp_col TIMESTAMP,
+    timestamp_zone_col TIMESTAMP WITH TIME ZONE,
+    interval_col INTERVAL YEAR,
+    binary_col BINARY,
+    binary_var_col BINARY VARYING,
+    binary_large_col BINARY LARGE OBJECT,
+    java_col JAVA_OBJECT,
+    enum_col ENUM('clubs', 'diamonds', 'hearts', 'spades'),
+    geo_col GEOMETRY,
+    json_col JSON,
+    uuid_col UUID,
+    array_col INTEGER ARRAY,
+    row_col ROW(street_num INTEGER, street_name CHAR(20))
+);
diff --git a/ql/src/test/queries/clientpositive/jdbc_partition_table_pruned_pcolumn.q b/ql/src/test/queries/clientpositive/jdbc_partition_table_pruned_pcolumn.q
new file mode 100644
index 00000000000..75939b66b4e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/jdbc_partition_table_pruned_pcolumn.q
@@ -0,0 +1,23 @@
+--!qt:database:postgres:q_test_book_table.sql
+
+CREATE EXTERNAL TABLE book
+(
+    id int,
+    title varchar(20),
+    author int
+)
+STORED BY                                          
+'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (                                    
+    "hive.sql.database.type" = "POSTGRES",
+    "hive.sql.jdbc.driver" = "org.postgresql.Driver",
+    "hive.sql.jdbc.url" = "jdbc:postgresql://localhost:5432/qtestDB",
+    "hive.sql.dbcp.username" = "qtestuser",
+    "hive.sql.dbcp.password" = "qtestpassword",
+    "hive.sql.table" = "book",
+    "hive.sql.partitionColumn" = "author",
+    "hive.sql.numPartitions" = "2"
+);
+
+set hive.fetch.task.conversion=none;
+select book.id from book;
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_partition_table_pruned_pcolumn.q.out b/ql/src/test/results/clientpositive/llap/jdbc_partition_table_pruned_pcolumn.q.out
new file mode 100644
index 00000000000..2a71fcaa0f6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/jdbc_partition_table_pruned_pcolumn.q.out
@@ -0,0 +1,52 @@
+PREHOOK: query: CREATE EXTERNAL TABLE book
+(
+    id int,
+    title varchar(20),
+    author int
+)
+STORED BY                                          
+'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (                                    
+    "hive.sql.database.type" = "POSTGRES",
+    "hive.sql.jdbc.driver" = "org.postgresql.Driver",
+    "hive.sql.jdbc.url" = "jdbc:postgresql://localhost:5432/qtestDB",
+    "hive.sql.dbcp.username" = "qtestuser",
+    "hive.sql.dbcp.password" = "qtestpassword",
+    "hive.sql.table" = "book",
+    "hive.sql.partitionColumn" = "author",
+    "hive.sql.numPartitions" = "2"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@book
+POSTHOOK: query: CREATE EXTERNAL TABLE book
+(
+    id int,
+    title varchar(20),
+    author int
+)
+STORED BY                                          
+'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (                                    
+    "hive.sql.database.type" = "POSTGRES",
+    "hive.sql.jdbc.driver" = "org.postgresql.Driver",
+    "hive.sql.jdbc.url" = "jdbc:postgresql://localhost:5432/qtestDB",
+    "hive.sql.dbcp.username" = "qtestuser",
+    "hive.sql.dbcp.password" = "qtestpassword",
+    "hive.sql.table" = "book",
+    "hive.sql.partitionColumn" = "author",
+    "hive.sql.numPartitions" = "2"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@book
+PREHOOK: query: select book.id from book
+PREHOOK: type: QUERY
+PREHOOK: Input: default@book
+#### A masked pattern was here ####
+POSTHOOK: query: select book.id from book
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@book
+#### A masked pattern was here ####
+1
+2