Posted to commits@hive.apache.org by jc...@apache.org on 2018/10/05 22:03:23 UTC
hive git commit: HIVE-20652: JdbcStorageHandler push join of two different datasource to jdbc driver (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master 159305a3a -> a4b087b18
HIVE-20652: JdbcStorageHandler push join of two different datasource to jdbc driver (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a4b087b1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a4b087b1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a4b087b1
Branch: refs/heads/master
Commit: a4b087b18bd5b0b4023bced68c85cf1e16301fed
Parents: 159305a
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Tue Oct 2 10:57:07 2018 -0700
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Fri Oct 5 15:02:46 2018 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hive/conf/Constants.java | 13 +-
.../test/resources/testconfiguration.properties | 1 +
.../hive/storage/jdbc/JdbcInputFormat.java | 10 +
.../hive/storage/jdbc/JdbcInputSplit.java | 19 +-
.../org/apache/hive/storage/jdbc/JdbcSerDe.java | 134 ++---
.../storage/jdbc/conf/JdbcStorageConfig.java | 25 +-
.../jdbc/conf/JdbcStorageConfigManager.java | 20 +-
.../jdbc/dao/GenericJdbcDatabaseAccessor.java | 11 +-
.../reloperators/jdbc/HiveJdbcConverter.java | 121 +++-
.../calcite/rules/jdbc/HiveJdbcImplementor.java | 43 ++
.../rules/jdbc/JDBCJoinPushDownRule.java | 57 +-
.../rules/jdbc/JDBCUnionPushDownRule.java | 32 +-
.../calcite/translator/ASTBuilder.java | 28 +-
.../hadoop/hive/ql/parse/CalcitePlanner.java | 23 +-
.../clientpositive/external_jdbc_table2.q | 128 +++++
.../llap/external_jdbc_table.q.out | 26 +-
.../llap/external_jdbc_table2.q.out | 559 +++++++++++++++++++
.../clientpositive/llap/jdbc_handler.q.out | 8 +-
18 files changed, 1067 insertions(+), 191 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/common/src/java/org/apache/hadoop/hive/conf/Constants.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/Constants.java b/common/src/java/org/apache/hadoop/hive/conf/Constants.java
index 807d6bc..437096b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/Constants.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/Constants.java
@@ -57,10 +57,19 @@ public class Constants {
/* Kafka Ingestion state - valid values - START/STOP/RESET */
public static final String DRUID_KAFKA_INGESTION = "druid.kafka.ingestion";
- public static final String HIVE_JDBC_QUERY = "hive.sql.generated.query";
- public static final String JDBC_QUERY = "hive.sql.query";
public static final String JDBC_HIVE_STORAGE_HANDLER_ID =
"org.apache.hive.storage.jdbc.JdbcStorageHandler";
+ public static final String JDBC_CONFIG_PREFIX = "hive.sql";
+ public static final String JDBC_TABLE = JDBC_CONFIG_PREFIX + ".table";
+ public static final String JDBC_DATABASE_TYPE = JDBC_CONFIG_PREFIX + ".database.type";
+ public static final String JDBC_URL = JDBC_CONFIG_PREFIX + ".jdbc.url";
+ public static final String JDBC_DRIVER = JDBC_CONFIG_PREFIX + ".jdbc.driver";
+ public static final String JDBC_USERNAME = JDBC_CONFIG_PREFIX + ".dbcp.username";
+ public static final String JDBC_PASSWORD = JDBC_CONFIG_PREFIX + ".dbcp.password";
+ public static final String JDBC_QUERY = JDBC_CONFIG_PREFIX + ".query";
+ public static final String JDBC_QUERY_FIELD_NAMES = JDBC_CONFIG_PREFIX + ".query.fieldNames";
+ public static final String JDBC_QUERY_FIELD_TYPES = JDBC_CONFIG_PREFIX + ".query.fieldTypes";
+ public static final String JDBC_SPLIT_QUERY = JDBC_CONFIG_PREFIX + ".query.split";
public static final String HIVE_SERVER2_JOB_CREDSTORE_PASSWORD_ENVVAR = "HIVE_JOB_CREDSTORE_PASSWORD";
public static final String HADOOP_CREDENTIAL_PASSWORD_ENVVAR = "HADOOP_CREDSTORE_PASSWORD";
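For reference, a minimal sketch of how the consolidated "hive.sql" keys surface as table properties; the values below are hypothetical and only illustrate the naming scheme introduced above.

import java.util.Properties;
import org.apache.hadoop.hive.conf.Constants;

public class JdbcTablePropsSketch {
  public static void main(String[] args) {
    Properties tbl = new Properties();
    tbl.setProperty(Constants.JDBC_DATABASE_TYPE, "DERBY");       // hive.sql.database.type
    tbl.setProperty(Constants.JDBC_URL, "jdbc:derby:memory:db1"); // hive.sql.jdbc.url
    tbl.setProperty(Constants.JDBC_TABLE, "SIMPLE_DERBY_TABLE1"); // hive.sql.table
    // Filled in by the planner when a query is pushed down:
    tbl.setProperty(Constants.JDBC_QUERY, "SELECT \"ikey\" FROM \"SIMPLE_DERBY_TABLE1\"");
    tbl.setProperty(Constants.JDBC_QUERY_FIELD_NAMES, "ikey");    // hive.sql.query.fieldNames
    tbl.setProperty(Constants.JDBC_QUERY_FIELD_TYPES, "int");     // hive.sql.query.fieldTypes
    tbl.setProperty(Constants.JDBC_SPLIT_QUERY, "true");          // hive.sql.query.split
  }
}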
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index d444c99..9984ce5 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -517,6 +517,7 @@ minillaplocal.query.files=\
explainuser_1.q,\
explainuser_4.q,\
external_jdbc_table.q,\
+ external_jdbc_table2.q,\
fullouter_mapjoin_1_optimized.q,\
groupby2.q,\
groupby_groupingset_bug.q,\
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index caa823f..59104fe 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -16,6 +16,7 @@
package org.apache.hive.storage.jdbc;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.ql.io.HiveInputFormat;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
@@ -60,6 +61,15 @@ public class JdbcInputFormat extends HiveInputFormat<LongWritable, MapWritable>
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
try {
+ if (!job.getBoolean(Constants.JDBC_SPLIT_QUERY, true)) {
+ // We will not split this query
+ LOGGER.debug("Creating 1 input splits");
+ InputSplit[] splits = new InputSplit[1];
+ splits[0] = new JdbcInputSplit(FileInputFormat.getInputPaths(job)[0]);
+ return splits;
+ }
+
+ // We will split this query into n splits
LOGGER.debug("Creating {} input splits", numSplits);
if (dbAccessor == null) {
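A standalone sketch of the gate added above: when "hive.sql.query.split" is false the whole query runs as a single split, otherwise the accessor slices it into numSplits LIMIT/OFFSET ranges. The helper below is hypothetical and only mirrors the flag handling.

import org.apache.hadoop.mapred.JobConf;

public class SplitGateSketch {
  // Mirrors the check in JdbcInputFormat.getSplits: the flag defaults to true.
  static int effectiveSplits(JobConf job, int requested) {
    return job.getBoolean("hive.sql.query.split", true) ? requested : 1;
  }

  public static void main(String[] args) {
    JobConf job = new JobConf(false); // skip loading default resources
    job.setBoolean("hive.sql.query.split", false); // e.g. the pushed query contains a join
    System.out.println(effectiveSplits(job, 4));   // 1
  }
}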
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputSplit.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputSplit.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputSplit.java
index a691cc2..3a6ada8 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputSplit.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputSplit.java
@@ -31,20 +31,25 @@ public class JdbcInputSplit extends FileSplit implements InputSplit {
public JdbcInputSplit() {
- super((Path) null, 0, 0, EMPTY_ARRAY);
-
+ super(null, 0, 0, EMPTY_ARRAY);
+ this.limit = -1;
+ this.offset = 0;
}
-
- public JdbcInputSplit(long start, long end, Path dummyPath) {
+ public JdbcInputSplit(Path dummyPath) {
super(dummyPath, 0, 0, EMPTY_ARRAY);
- this.setLimit((int) start);
- this.setOffset((int) end);
+ this.limit = -1;
+ this.offset = 0;
}
+ public JdbcInputSplit(int limit, int offset, Path dummyPath) {
+ super(dummyPath, 0, 0, EMPTY_ARRAY);
+ this.limit = limit;
+ this.offset = offset;
+ }
public JdbcInputSplit(int limit, int offset) {
- super((Path) null, 0, 0, EMPTY_ARRAY);
+ super(null, 0, 0, EMPTY_ARRAY);
this.limit = limit;
this.offset = offset;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
index 8bed9f2..5947628 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
@@ -14,6 +14,7 @@
*/
package org.apache.hive.storage.jdbc;
+import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.common.type.Date;
@@ -23,11 +24,12 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.ObjectWritable;
@@ -51,10 +53,9 @@ public class JdbcSerDe extends AbstractSerDe {
private static final Logger LOGGER = LoggerFactory.getLogger(JdbcSerDe.class);
- private StructObjectInspector objectInspector;
- private int numColumns;
- private String[] hiveColumnTypeArray;
- private List<String> columnNames;
+ private String[] hiveColumnNames;
+ private PrimitiveTypeInfo[] hiveColumnTypes;
+ private ObjectInspector inspector;
private List<Object> row;
@@ -65,50 +66,60 @@ public class JdbcSerDe extends AbstractSerDe {
* @see org.apache.hadoop.hive.serde2.Deserializer#initialize(org.apache.hadoop.conf.Configuration, java.util.Properties)
*/
@Override
- public void initialize(Configuration conf, Properties tbl) throws SerDeException {
+ public void initialize(Configuration conf, Properties properties) throws SerDeException {
try {
- LOGGER.trace("Initializing the SerDe");
-
- if (tbl.containsKey(JdbcStorageConfig.DATABASE_TYPE.getPropertyName())) {
- final boolean hiveQueryExecution = tbl.containsKey(Constants.HIVE_JDBC_QUERY);
-
- Configuration tableConfig = JdbcStorageConfigManager.convertPropertiesToConfiguration(tbl);
+ LOGGER.trace("Initializing the JdbcSerDe");
+ if (properties.containsKey(JdbcStorageConfig.DATABASE_TYPE.getPropertyName())) {
+ Configuration tableConfig = JdbcStorageConfigManager.convertPropertiesToConfiguration(properties);
DatabaseAccessor dbAccessor = DatabaseAccessorFactory.getAccessor(tableConfig);
- columnNames = dbAccessor.getColumnNames(tableConfig);
- numColumns = columnNames.size();
- List<String> hiveColumnNames;
- if (hiveQueryExecution) {
- hiveColumnNames = columnNames;
- final List<String> columnTypes = dbAccessor.getColumnTypes(tableConfig);
- hiveColumnTypeArray = new String[columnTypes.size()];
- hiveColumnTypeArray = columnTypes.toArray(hiveColumnTypeArray);
- } else {
-
- String[] hiveColumnNameArray = parseProperty(tbl.getProperty(serdeConstants.LIST_COLUMNS), ",");
- if (numColumns != hiveColumnNameArray.length) {
- throw new SerDeException("Expected " + numColumns + " columns. Table definition has "
- + hiveColumnNameArray.length + " columns");
- }
- hiveColumnNames = Arrays.asList(hiveColumnNameArray);
- hiveColumnTypeArray = parseProperty(tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES), ":");
- if (hiveColumnTypeArray.length == 0) {
- throw new SerDeException("Received an empty Hive column type definition");
+ // Extract information from properties
+ String[] jdbcColumnNamesArray;
+ List<TypeInfo> hiveColumnTypesArray;
+ if (properties.containsKey(Constants.JDBC_TABLE) && properties.containsKey(Constants.JDBC_QUERY)) {
+ // The query has been autogenerated by Hive; the column names in the
+ // pushed query are the same as the list of hiveColumnNames
+ String fieldNamesProperty =
+ Preconditions.checkNotNull(properties.getProperty(Constants.JDBC_QUERY_FIELD_NAMES, null));
+ String fieldTypesProperty =
+ Preconditions.checkNotNull(properties.getProperty(Constants.JDBC_QUERY_FIELD_TYPES, null));
+ hiveColumnNames = fieldNamesProperty.trim().split(",");
+ jdbcColumnNamesArray = hiveColumnNames;
+ hiveColumnTypesArray = TypeInfoUtils.getTypeInfosFromTypeString(fieldTypesProperty);
+ } else {
+ // The query was hardcoded by the user, or we are creating the table.
+ // We obtain the column names with the db accessor.
+ List<String> columnNames = dbAccessor.getColumnNames(tableConfig);
+ hiveColumnNames = columnNames.toArray(new String[columnNames.size()]);
+ // These are the column names for the table defined with the JDBC storage handler.
+ jdbcColumnNamesArray = parseProperty(properties.getProperty(serdeConstants.LIST_COLUMNS), ",");
+ if (hiveColumnNames.length != jdbcColumnNamesArray.length) {
+ throw new SerDeException("Expected " + hiveColumnNames.length + " hiveColumnNames. Table definition has "
+ + jdbcColumnNamesArray.length + " hiveColumnNames");
}
+ hiveColumnTypesArray = TypeInfoUtils.getTypeInfosFromTypeString(properties.getProperty(serdeConstants.LIST_COLUMN_TYPES));
}
-
- List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>(numColumns);
- for (int i = 0; i < numColumns; i++) {
- PrimitiveTypeInfo ti = TypeInfoFactory.getPrimitiveTypeInfo(hiveColumnTypeArray[i]);
- ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(ti);
- fieldInspectors.add(oi);
+ if (hiveColumnTypesArray.size() == 0) {
+ throw new SerDeException("Received an empty Hive column type definition");
}
- objectInspector =
- ObjectInspectorFactory.getStandardStructObjectInspector(hiveColumnNames,
- fieldInspectors);
- row = new ArrayList<Object>(numColumns);
+ // Populate column types and inspector
+ hiveColumnTypes = new PrimitiveTypeInfo[hiveColumnTypesArray.size()];
+ List<ObjectInspector> fieldInspectors = new ArrayList<>(hiveColumnNames.length);
+ for (int i = 0; i < hiveColumnNames.length; i++) {
+ TypeInfo ti = hiveColumnTypesArray.get(i);
+ if (ti.getCategory() != Category.PRIMITIVE) {
+ throw new SerDeException("Non primitive types not supported yet");
+ }
+ hiveColumnTypes[i] = (PrimitiveTypeInfo) ti;
+ fieldInspectors.add(
+ PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(hiveColumnTypes[i]));
+ }
+ inspector =
+ ObjectInspectorFactory.getStandardStructObjectInspector(Arrays.asList(jdbcColumnNamesArray),
+ fieldInspectors);
+ row = new ArrayList<>(hiveColumnNames.length);
}
}
catch (Exception e) {
@@ -134,7 +145,7 @@ public class JdbcSerDe extends AbstractSerDe {
throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
}
- if ((row == null) || (columnNames == null)) {
+ if ((row == null) || (hiveColumnNames == null)) {
throw new SerDeException("JDBC SerDe hasn't been initialized properly");
}
@@ -142,8 +153,8 @@ public class JdbcSerDe extends AbstractSerDe {
MapWritable input = (MapWritable) blob;
Text columnKey = new Text();
- for (int i = 0; i < numColumns; i++) {
- columnKey.set(columnNames.get(i));
+ for (int i = 0; i < hiveColumnNames.length; i++) {
+ columnKey.set(hiveColumnNames[i]);
Writable value = input.get(columnKey);
Object rowVal;
@@ -152,58 +163,55 @@ public class JdbcSerDe extends AbstractSerDe {
} else {
rowVal = ((ObjectWritable)value).get();
- switch (hiveColumnTypeArray[i].toLowerCase()) {
- case "int":
- case "integer":
- case "smallint":
- case "tinyint":
+ switch (hiveColumnTypes[i].getPrimitiveCategory()) {
+ case INT:
+ case SHORT:
+ case BYTE:
if (rowVal instanceof Number) {
rowVal = ((Number)rowVal).intValue();
} else {
rowVal = Integer.valueOf(rowVal.toString());
}
break;
- case "bigint":
+ case LONG:
if (rowVal instanceof Long) {
rowVal = ((Number)rowVal).longValue();
} else {
rowVal = Long.valueOf(rowVal.toString());
}
break;
- case "float":
+ case FLOAT:
if (rowVal instanceof Number) {
rowVal = ((Number)rowVal).floatValue();
} else {
rowVal = Float.valueOf(rowVal.toString());
}
break;
- case "double":
+ case DOUBLE:
if (rowVal instanceof Number) {
rowVal = ((Number)rowVal).doubleValue();
} else {
rowVal = Double.valueOf(rowVal.toString());
}
break;
- case "bigdecimal":
+ case DECIMAL:
if (!(rowVal instanceof BigDecimal)) {
rowVal = new BigDecimal(rowVal.toString());
}
break;
- case "boolean":
+ case BOOLEAN:
if (rowVal instanceof Number) {
rowVal = ((Number) value).intValue() != 0;
} else {
rowVal = Boolean.valueOf(value.toString());
}
break;
- case "string":
- case "char":
- case "varchar":
- case "long varchar":
+ case CHAR:
+ case VARCHAR:
+ case STRING:
rowVal = rowVal.toString();
break;
- case "datetime":
- case "time":
+ case DATE:
if (rowVal instanceof java.sql.Date) {
java.sql.Date dateRowVal = (java.sql.Date) rowVal;
rowVal = Date.ofEpochMilli(dateRowVal.getTime());
@@ -211,7 +219,7 @@ public class JdbcSerDe extends AbstractSerDe {
rowVal = Date.valueOf (rowVal.toString());
}
break;
- case "timestamp":
+ case TIMESTAMP:
if (rowVal instanceof java.sql.Timestamp) {
java.sql.Timestamp timestampRowVal = (java.sql.Timestamp) rowVal;
rowVal = Timestamp.ofEpochMilli(timestampRowVal.getTime(), timestampRowVal.getNanos());
@@ -232,7 +240,7 @@ public class JdbcSerDe extends AbstractSerDe {
@Override
public ObjectInspector getObjectInspector() throws SerDeException {
- return objectInspector;
+ return inspector;
}
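Condensed, the new initialize logic resolves column metadata from one of two places. A minimal sketch with hypothetical property values (path 2, which consults the DatabaseAccessor, is elided):

import java.util.List;
import java.util.Properties;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class SerDeInitSketch {
  public static void main(String[] args) {
    Properties p = new Properties();
    p.setProperty(Constants.JDBC_TABLE, "SIMPLE_DERBY_TABLE1");
    p.setProperty(Constants.JDBC_QUERY, "SELECT \"ikey\", \"bkey\" FROM \"SIMPLE_DERBY_TABLE1\"");
    p.setProperty(Constants.JDBC_QUERY_FIELD_NAMES, "ikey,bkey");
    p.setProperty(Constants.JDBC_QUERY_FIELD_TYPES, "int,bigint");

    if (p.containsKey(Constants.JDBC_TABLE) && p.containsKey(Constants.JDBC_QUERY)) {
      // Path 1: the query was autogenerated by Hive, so column names and types
      // travel with the plan in the new fieldNames/fieldTypes properties.
      String[] names = p.getProperty(Constants.JDBC_QUERY_FIELD_NAMES).trim().split(",");
      List<TypeInfo> types = TypeInfoUtils.getTypeInfosFromTypeString(
          p.getProperty(Constants.JDBC_QUERY_FIELD_TYPES));
      System.out.println(names.length + " columns: " + types);
    }
    // Path 2 (user-defined query or plain table): column names come from the
    // DatabaseAccessor and are validated against the DDL column list.
  }
}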
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfig.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfig.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfig.java
index 1ccbe08..adc3022 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfig.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfig.java
@@ -14,14 +14,16 @@
*/
package org.apache.hive.storage.jdbc.conf;
+import org.apache.hadoop.hive.conf.Constants;
+
public enum JdbcStorageConfig {
- DATABASE_TYPE("database.type", true),
- JDBC_URL("jdbc.url", true),
- JDBC_DRIVER_CLASS("jdbc.driver", true),
- QUERY("query", false),
- TABLE("table", false),
- JDBC_FETCH_SIZE("jdbc.fetch.size", false),
- COLUMN_MAPPING("column.mapping", false);
+ DATABASE_TYPE(Constants.JDBC_DATABASE_TYPE, true),
+ JDBC_URL(Constants.JDBC_URL, true),
+ JDBC_DRIVER_CLASS(Constants.JDBC_DRIVER, true),
+ QUERY(Constants.JDBC_QUERY, false),
+ TABLE(Constants.JDBC_TABLE, false),
+ JDBC_FETCH_SIZE(Constants.JDBC_CONFIG_PREFIX + ".jdbc.fetch.size", false),
+ COLUMN_MAPPING(Constants.JDBC_CONFIG_PREFIX + ".column.mapping", false);
private String propertyName;
private boolean required = false;
@@ -32,17 +34,10 @@ public enum JdbcStorageConfig {
this.required = required;
}
-
- JdbcStorageConfig(String propertyName) {
- this.propertyName = propertyName;
- }
-
-
public String getPropertyName() {
- return JdbcStorageConfigManager.CONFIG_PREFIX + "." + propertyName;
+ return propertyName;
}
-
public boolean isRequired() {
return required;
}
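With the enum now built on the shared constants, getPropertyName() returns the fully qualified key directly instead of prepending the prefix at call time. A small illustration (the printed values follow from the constants above):

import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig;

public class ConfigNameSketch {
  public static void main(String[] args) {
    System.out.println(JdbcStorageConfig.JDBC_URL.getPropertyName()); // hive.sql.jdbc.url
    System.out.println(JdbcStorageConfig.QUERY.getPropertyName());    // hive.sql.query
  }
}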
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
index 55fc0ea..1b5a826 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
@@ -15,11 +15,11 @@
package org.apache.hive.storage.jdbc.conf;
import java.io.IOException;
+import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.storage.jdbc.conf.DatabaseType;
import org.apache.hadoop.conf.Configuration;
@@ -39,9 +39,8 @@ import java.util.Properties;
public class JdbcStorageConfigManager {
private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageConfigManager.class);
- public static final String CONFIG_PREFIX = "hive.sql";
- public static final String CONFIG_PWD = CONFIG_PREFIX + ".dbcp.password";
- public static final String CONFIG_USERNAME = CONFIG_PREFIX + ".dbcp.username";
+ public static final String CONFIG_USERNAME = Constants.JDBC_USERNAME;
+ public static final String CONFIG_PWD = Constants.JDBC_PASSWORD;
private static final EnumSet<JdbcStorageConfig> DEFAULT_REQUIRED_PROPERTIES =
EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
JdbcStorageConfig.JDBC_URL,
@@ -118,13 +117,16 @@ public class JdbcStorageConfigManager {
public static String getQueryToExecute(Configuration config) {
- String query = config.get(JdbcStorageConfig.QUERY.getPropertyName());
-
- if (query == null) {
- String tableName = config.get(JdbcStorageConfig.TABLE.getPropertyName());
- query = "select * from " + tableName;
+ String query = config.get(Constants.JDBC_QUERY);
+ if (query != null) {
+ // Already defined query, we return it
+ return query;
}
+ // We generate query as select *
+ String tableName = config.get(JdbcStorageConfig.TABLE.getPropertyName());
+ query = "select * from " + tableName;
+
String hiveFilterCondition = QueryConditionBuilder.getInstance().buildCondition(config);
if ((hiveFilterCondition != null) && (!hiveFilterCondition.trim().isEmpty())) {
query = query + " WHERE " + hiveFilterCondition;
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index b2ad9a6..ab19318 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -16,6 +16,7 @@ package org.apache.hive.storage.jdbc.dao;
import org.apache.commons.dbcp.BasicDataSourceFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
@@ -47,7 +48,7 @@ import java.util.Properties;
*/
public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
- protected static final String DBCP_CONFIG_PREFIX = JdbcStorageConfigManager.CONFIG_PREFIX + ".dbcp";
+ protected static final String DBCP_CONFIG_PREFIX = Constants.JDBC_CONFIG_PREFIX + ".dbcp";
protected static final int DEFAULT_FETCH_SIZE = 1000;
protected static final Logger LOGGER = LoggerFactory.getLogger(GenericJdbcDatabaseAccessor.class);
protected DataSource dbcpDataSource = null;
@@ -237,9 +238,10 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
protected String addLimitAndOffsetToQuery(String sql, int limit, int offset) {
if (offset == 0) {
return addLimitToQuery(sql, limit);
- }
- else {
+ } else if (limit != -1) {
return sql + " {LIMIT " + limit + " OFFSET " + offset + "}";
+ } else {
+ return sql + " {OFFSET " + offset + "}";
}
}
@@ -248,6 +250,9 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
* Uses generic JDBC escape functions to add a limit clause to a query string
*/
protected String addLimitToQuery(String sql, int limit) {
+ if (limit == -1) {
+ return sql;
+ }
return sql + " {LIMIT " + limit + "}";
}
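The -1 sentinel introduced here means "no limit" and pairs with the single-split path in JdbcInputFormat. A standalone sketch of the resulting JDBC escape clauses:

public class LimitOffsetSketch {
  // Mirrors addLimitAndOffsetToQuery/addLimitToQuery above.
  static String addLimitAndOffset(String sql, int limit, int offset) {
    if (offset == 0) {
      return limit == -1 ? sql : sql + " {LIMIT " + limit + "}";
    } else if (limit != -1) {
      return sql + " {LIMIT " + limit + " OFFSET " + offset + "}";
    } else {
      return sql + " {OFFSET " + offset + "}";
    }
  }

  public static void main(String[] args) {
    System.out.println(addLimitAndOffset("select * from t", -1, 0));  // select * from t
    System.out.println(addLimitAndOffset("select * from t", 10, 20)); // select * from t {LIMIT 10 OFFSET 20}
    System.out.println(addLimitAndOffset("select * from t", -1, 20)); // select * from t {OFFSET 20}
  }
}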
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/jdbc/HiveJdbcConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/jdbc/HiveJdbcConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/jdbc/HiveJdbcConverter.java
index fc54644..3820602 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/jdbc/HiveJdbcConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/jdbc/HiveJdbcConverter.java
@@ -17,21 +17,28 @@
*/
package org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc;
+import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.adapter.jdbc.JdbcConvention;
-import org.apache.calcite.adapter.jdbc.JdbcImplementor;
-import org.apache.calcite.adapter.jdbc.JdbcRel;
+import org.apache.calcite.adapter.jdbc.JdbcRules.JdbcProject;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelVisitor;
import org.apache.calcite.rel.convert.ConverterImpl;
+import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rel.core.TableScan;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.util.ControlFlowException;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode;
+import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.HiveJdbcImplementor;
/**
* This is a designated RelNode that splits the Hive operators and the Jdbc operators,
@@ -40,17 +47,15 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode;
public class HiveJdbcConverter extends ConverterImpl implements HiveRelNode {
private final JdbcConvention convention;
+ private final String url;
+ private final String user;
public HiveJdbcConverter(RelOptCluster cluster, RelTraitSet traits,
- JdbcRel input, JdbcConvention jc) {
+ RelNode input, JdbcConvention jc, String url, String user) {
super(cluster, ConventionTraitDef.INSTANCE, traits, input);
- convention = jc;
- }
-
- private HiveJdbcConverter(RelOptCluster cluster, RelTraitSet traits,
- RelNode input, JdbcConvention jc) {
- super(cluster, ConventionTraitDef.INSTANCE, traits, input);
- convention = jc;
+ this.convention = jc;
+ this.url = url;
+ this.user = user;
}
public JdbcConvention getJdbcConvention() {
@@ -61,6 +66,14 @@ public class HiveJdbcConverter extends ConverterImpl implements HiveRelNode {
return convention.dialect;
}
+ public String getConnectionUrl() {
+ return url;
+ }
+
+ public String getConnectionUser() {
+ return user;
+ }
+
@Override
public void implement(Implementor implementor) {
@@ -70,19 +83,51 @@ public class HiveJdbcConverter extends ConverterImpl implements HiveRelNode {
public RelNode copy(
RelTraitSet traitSet,
List<RelNode> inputs) {
- return new HiveJdbcConverter(getCluster(), traitSet, sole(inputs), convention);
+ return new HiveJdbcConverter(getCluster(), traitSet, sole(inputs), convention, url, user);
+ }
+
+ public RelNode copy(RelTraitSet traitSet, RelNode input) {
+ return new HiveJdbcConverter(getCluster(), traitSet, input, convention, url, user);
}
public String generateSql() {
SqlDialect dialect = getJdbcDialect();
- final JdbcImplementor jdbcImplementor =
- new JdbcImplementor(dialect,
+ final HiveJdbcImplementor jdbcImplementor =
+ new HiveJdbcImplementor(dialect,
(JavaTypeFactory) getCluster().getTypeFactory());
- final JdbcImplementor.Result result =
- jdbcImplementor.visitChild(0, getInput());
+ Project topProject;
+ if (getInput() instanceof Project) {
+ topProject = (Project) getInput();
+ } else {
+ // If it is not a Project operator, we add one on top of the input
+ // to force generating the column names instead of * while
+ // translating to SQL
+ RelNode nodeToTranslate = getInput();
+ RexBuilder builder = getCluster().getRexBuilder();
+ List<RexNode> projects = new ArrayList<>(
+ nodeToTranslate.getRowType().getFieldList().size());
+ for (int i = 0; i < nodeToTranslate.getRowType().getFieldCount(); i++) {
+ projects.add(builder.makeInputRef(nodeToTranslate, i));
+ }
+ topProject = new JdbcProject(nodeToTranslate.getCluster(),
+ nodeToTranslate.getTraitSet(), nodeToTranslate,
+ projects, nodeToTranslate.getRowType());
+ }
+ final HiveJdbcImplementor.Result result =
+ jdbcImplementor.translate(topProject);
return result.asStatement().toSqlString(dialect).getSql();
}
+ /**
+ * Whether the execution of the query below this jdbc converter
+ * can be split by Hive.
+ */
+ public boolean splittingAllowed() {
+ JdbcRelVisitor visitor = new JdbcRelVisitor();
+ visitor.go(getInput());
+ return visitor.splittingAllowed;
+ }
+
public JdbcHiveTableScan getTableScan() {
final JdbcHiveTableScan[] tmpJdbcHiveTableScan = new JdbcHiveTableScan[1];
new RelVisitor() {
@@ -104,4 +149,50 @@ public class HiveJdbcConverter extends ConverterImpl implements HiveRelNode {
assert jdbcHiveTableScan != null;
return jdbcHiveTableScan;
}
+
+ private static class JdbcRelVisitor extends RelVisitor {
+
+ private boolean splittingAllowed;
+
+ public JdbcRelVisitor() {
+ this.splittingAllowed = true;
+ }
+
+ @Override
+ public void visit(RelNode node, int ordinal, RelNode parent) {
+ if (node instanceof Project ||
+ node instanceof Filter ||
+ node instanceof TableScan) {
+ // We can continue
+ super.visit(node, ordinal, parent);
+ } else {
+ throw new ReturnedValue(false);
+ }
+ }
+
+ /**
+ * Starts an iteration.
+ */
+ public RelNode go(RelNode p) {
+ try {
+ visit(p, 0, null);
+ } catch (ReturnedValue e) {
+ // Splitting cannot be performed
+ splittingAllowed = e.value;
+ }
+ return p;
+ }
+
+ /**
+ * Exception used to interrupt a visitor walk.
+ */
+ private static class ReturnedValue extends ControlFlowException {
+ private final boolean value;
+
+ public ReturnedValue(boolean value) {
+ this.value = value;
+ }
+ }
+
+ }
}
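The intent of splittingAllowed(): slicing a pushed query with LIMIT/OFFSET is only safe when the plan under the converter is a chain of Project/Filter/TableScan; a pushed join or union makes the per-slice results non-composable. A toy model of the walk, with hypothetical node kinds standing in for the Calcite RelNode classes:

import java.util.Arrays;
import java.util.List;

public class SplitCheckSketch {
  enum Kind { PROJECT, FILTER, TABLE_SCAN, JOIN, UNION }

  static boolean splittingAllowed(List<Kind> plan) {
    for (Kind k : plan) {
      if (k != Kind.PROJECT && k != Kind.FILTER && k != Kind.TABLE_SCAN) {
        return false; // mirrors the visitor throwing ReturnedValue(false)
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(splittingAllowed(Arrays.asList(Kind.PROJECT, Kind.TABLE_SCAN)));            // true
    System.out.println(splittingAllowed(Arrays.asList(Kind.PROJECT, Kind.JOIN, Kind.TABLE_SCAN))); // false
  }
}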
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/HiveJdbcImplementor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/HiveJdbcImplementor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/HiveJdbcImplementor.java
new file mode 100644
index 0000000..edca312
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/HiveJdbcImplementor.java
@@ -0,0 +1,43 @@
+package org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.calcite.adapter.java.JavaTypeFactory;
+import org.apache.calcite.adapter.jdbc.JdbcImplementor;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.CorrelationId;
+import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlDialect;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlNodeList;
+
+public class HiveJdbcImplementor extends JdbcImplementor {
+
+ public HiveJdbcImplementor(SqlDialect dialect, JavaTypeFactory typeFactory) {
+ super(dialect, typeFactory);
+ }
+
+ public Result translate(Project e) {
+ // This variant is for the top project as we want to keep
+ // the column aliases instead of producing STAR
+ Result x = visitChild(0, e.getInput());
+ parseCorrelTable(e, x);
+ final Builder builder =
+ x.builder(e, Clause.SELECT);
+ final List<SqlNode> selectList = new ArrayList<>();
+ for (RexNode ref : e.getChildExps()) {
+ SqlNode sqlExpr = builder.context.toSql(null, ref);
+ addSelect(selectList, sqlExpr, e.getRowType());
+ }
+
+ builder.setSelect(new SqlNodeList(selectList, POS));
+ return builder.result();
+ }
+
+ private void parseCorrelTable(RelNode relNode, Result x) {
+ for (CorrelationId id : relNode.getVariablesSet()) {
+ correlTableMap.put(id, x.qualifiedContext());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCJoinPushDownRule.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCJoinPushDownRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCJoinPushDownRule.java
index 459be6e..795eae2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCJoinPushDownRule.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCJoinPushDownRule.java
@@ -17,16 +17,15 @@
*/
package org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc;
-import java.util.Arrays;
-
import org.apache.calcite.adapter.jdbc.JdbcRules.JdbcJoin;
-import org.apache.calcite.adapter.jdbc.JdbcRules.JdbcJoinRule;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.rel.InvalidRelException;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rex.RexNode;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin;
+
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -55,22 +54,27 @@ public class JDBCJoinPushDownRule extends RelOptRule {
final HiveJdbcConverter converter1 = call.rel(1);
final HiveJdbcConverter converter2 = call.rel(2);
- //The actual check should be the compare of the connection string of the external tables
- /*if (converter1.getJdbcConvention().equals(converter2.getJdbcConvention()) == false) {
+ // First we compare the convention
+ if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
return false;
- }*/
+ }
- if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
+ // Second, we compare the connection string
+ if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) {
return false;
}
+ // Third, we compare the connection user
+ if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) {
+ return false;
+ }
+
+ // We do not push cross joins
if (cond.isAlwaysTrue()) {
- //We don't want to push cross join
return false;
}
- boolean visitorRes = JDBCRexCallValidator.isValidJdbcOperation(cond, converter1.getJdbcDialect());
- return visitorRes;
+ return JDBCRexCallValidator.isValidJdbcOperation(cond, converter1.getJdbcDialect());
}
@Override
@@ -79,21 +83,26 @@ public class JDBCJoinPushDownRule extends RelOptRule {
final HiveJoin join = call.rel(0);
final HiveJdbcConverter converter1 = call.rel(1);
+ final RelNode input1 = converter1.getInput();
final HiveJdbcConverter converter2 = call.rel(2);
-
- RelNode input1 = converter1.getInput();
- RelNode input2 = converter2.getInput();
-
- HiveJoin newHiveJoin = join.copy(join.getTraitSet(), join.getCondition(), input1, input2, join.getJoinType(),
- join.isSemiJoinDone());
- JdbcJoin newJdbcJoin = (JdbcJoin) new JdbcJoinRule(converter1.getJdbcConvention()).convert(newHiveJoin,
- false);
- if (newJdbcJoin != null) {
- RelNode converterRes = converter1.copy(converter1.getTraitSet(), Arrays.asList(newJdbcJoin));
- if (converterRes != null) {
- call.transformTo(converterRes);
- }
+ final RelNode input2 = converter2.getInput();
+
+ JdbcJoin jdbcJoin;
+ try {
+ jdbcJoin = new JdbcJoin(
+ join.getCluster(),
+ join.getTraitSet().replace(converter1.getJdbcConvention()),
+ input1,
+ input2,
+ join.getCondition(),
+ join.getVariablesSet(),
+ join.getJoinType());
+ } catch (InvalidRelException e) {
+ LOG.warn(e.toString());
+ return;
}
+
+ call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcJoin));
}
-};
+}
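Condensed, the rewritten matches() requires both converters to target the same datasource with the same credentials before a join may be pushed. A toy model of the guard (class and field names hypothetical):

public class JoinPushGuardSketch {
  static final class Side {
    final String conventionName, url, user;
    Side(String conventionName, String url, String user) {
      this.conventionName = conventionName; this.url = url; this.user = user;
    }
  }

  static boolean canPushJoin(Side a, Side b, boolean crossJoin) {
    return a.conventionName.equals(b.conventionName) // same convention/dialect
        && a.url.equals(b.url)                       // same connection string
        && a.user.equals(b.user)                     // same connection user
        && !crossJoin;                               // cross joins are never pushed
  }

  public static void main(String[] args) {
    Side db1User1 = new Side("DERBY", "jdbc:derby:db1", "user1");
    Side db1User2 = new Side("DERBY", "jdbc:derby:db1", "user2");
    System.out.println(canPushJoin(db1User1, db1User1, false)); // true
    System.out.println(canPushJoin(db1User1, db1User2, false)); // false: different user
  }
}

This mirrors the new tests below: db1_ext_auth1 joined with db1_ext_auth2 (same URL and user) is pushed, while db1_ext_auth1 joined with db2_ext_auth2 (different URL and user) is not.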
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCUnionPushDownRule.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCUnionPushDownRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCUnionPushDownRule.java
index d4f3b0e..b67de07 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCUnionPushDownRule.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/jdbc/JDBCUnionPushDownRule.java
@@ -21,11 +21,9 @@ import java.util.Arrays;
import java.util.List;
import org.apache.calcite.adapter.jdbc.JdbcRules.JdbcUnion;
-import org.apache.calcite.adapter.jdbc.JdbcRules.JdbcUnionRule;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.core.Union;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion;
import org.slf4j.Logger;
@@ -55,12 +53,18 @@ public class JDBCUnionPushDownRule extends RelOptRule {
final HiveJdbcConverter converter1 = call.rel(1);
final HiveJdbcConverter converter2 = call.rel(2);
- //The actual check should be the compare of the connection string of the external tables
- /*if (converter1.getJdbcConvention().equals(converter2.getJdbcConvention()) == false) {
+ // First we compare the convention
+ if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
return false;
- }*/
+ }
- if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
+ // Second, we compare the connection string
+ if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) {
+ return false;
+ }
+
+ // Third, we compare the connection user
+ if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) {
return false;
}
@@ -75,14 +79,14 @@ public class JDBCUnionPushDownRule extends RelOptRule {
final HiveJdbcConverter converter1 = call.rel(1);
final HiveJdbcConverter converter2 = call.rel(2);
- final List<RelNode> unionInput = Arrays.asList(converter1.getInput(), converter2.getInput());
- Union newHiveUnion = (Union) union.copy(union.getTraitSet(), unionInput, union.all);
- JdbcUnion newJdbcUnion = (JdbcUnion) new JdbcUnionRule(converter1.getJdbcConvention()).convert(newHiveUnion);
- if (newJdbcUnion != null) {
- RelNode converterRes = converter1.copy(converter1.getTraitSet(), Arrays.asList(newJdbcUnion));
+ List<RelNode> unionInput = Arrays.asList(converter1.getInput(), converter2.getInput());
+ JdbcUnion jdbcUnion = new JdbcUnion(
+ union.getCluster(),
+ union.getTraitSet().replace(converter1.getJdbcConvention()),
+ unionInput,
+ union.all);
- call.transformTo(converterRes);
- }
+ call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcUnion));
}
-};
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
index 0408d7c..74f8c33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
@@ -68,10 +68,7 @@ public class ASTBuilder {
public static ASTNode table(final RelNode scan) {
HiveTableScan hts = null;
if (scan instanceof HiveJdbcConverter) {
- HiveJdbcConverter jdbcConverter = (HiveJdbcConverter) scan;
- JdbcHiveTableScan jdbcHiveTableScan = jdbcConverter.getTableScan();
-
- hts = jdbcHiveTableScan.getHiveTableScan();
+ hts = ((HiveJdbcConverter) scan).getTableScan().getHiveTableScan();
} else if (scan instanceof DruidQuery) {
hts = (HiveTableScan) ((DruidQuery) scan).getTableScan();
} else {
@@ -115,14 +112,29 @@ public class ASTBuilder {
} else if (scan instanceof HiveJdbcConverter) {
HiveJdbcConverter jdbcConverter = (HiveJdbcConverter) scan;
final String query = jdbcConverter.generateSql();
- LOGGER.info("The HiveJdbcConverter generated sql message is: " + System.lineSeparator() + query);
+ LOGGER.debug("Generated SQL query: " + System.lineSeparator() + query);
propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
.add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY + "\"")
.add(HiveParser.StringLiteral, "\"" + SemanticAnalyzer.escapeSQLString(query) + "\""));
-
+ // Whether we can split the query
propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
- .add(HiveParser.StringLiteral, "\"" + Constants.HIVE_JDBC_QUERY + "\"")
- .add(HiveParser.StringLiteral, "\"" + SemanticAnalyzer.escapeSQLString(query) + "\""));
+ .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_SPLIT_QUERY + "\"")
+ .add(HiveParser.StringLiteral, "\"" + jdbcConverter.splittingAllowed() + "\""));
+ // Adding column names used later by org.apache.hive.storage.jdbc.JdbcSerDe
+ propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
+ .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY_FIELD_NAMES + "\"")
+ .add(HiveParser.StringLiteral,
+ "\"" + scan.getRowType().getFieldNames().stream().map(Object::toString)
+ .collect(Collectors.joining(",")) + "\""
+ ));
+ // Adding column types used later by org.apache.hive.storage.jdbc.JdbcSerDe
+ propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
+ .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY_FIELD_TYPES + "\"")
+ .add(HiveParser.StringLiteral,
+ "\"" + scan.getRowType().getFieldList().stream()
+ .map(e -> TypeConverter.convert(e.getType()).getTypeName())
+ .collect(Collectors.joining(",")) + "\""
+ ));
}
if (hts.isInsideView()) {
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 82c3ca9..40cfdbd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -2821,7 +2821,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
// 4. Build operator
RelOptHiveTable optTable;
if (tableType == TableType.DRUID ||
- (tableType == TableType.JDBC && tabMetaData.getProperty("hive.sql.table") != null)) {
+ (tableType == TableType.JDBC && tabMetaData.getProperty(Constants.JDBC_TABLE) != null)) {
// Create case sensitive columns list
List<String> originalColumnNames =
((StandardStructObjectInspector)rowObjectInspector).getOriginalColumnNames();
@@ -2895,16 +2895,15 @@ public class CalcitePlanner extends SemanticAnalyzer {
getAliasId(tableAlias, qb),
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP),
qb.isInsideView() || qb.getAliasInsideView().contains(tableAlias.toLowerCase()));
- LOG.debug("JDBC is running");
- final String dataBaseType = tabMetaData.getProperty("hive.sql.database.type");
- final String url = tabMetaData.getProperty("hive.sql.jdbc.url");
- final String driver = tabMetaData.getProperty("hive.sql.jdbc.driver");
- final String user = tabMetaData.getProperty("hive.sql.dbcp.username");
- final String pswd = tabMetaData.getProperty("hive.sql.dbcp.password");
- //final String query = tabMetaData.getProperty("hive.sql.query");
- final String tableName = tabMetaData.getProperty("hive.sql.table");
-
- final DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);
+
+ final String dataBaseType = tabMetaData.getProperty(Constants.JDBC_DATABASE_TYPE);
+ final String url = tabMetaData.getProperty(Constants.JDBC_URL);
+ final String driver = tabMetaData.getProperty(Constants.JDBC_DRIVER);
+ final String user = tabMetaData.getProperty(Constants.JDBC_USERNAME);
+ final String pswd = tabMetaData.getProperty(Constants.JDBC_PASSWORD);
+ final String tableName = tabMetaData.getProperty(Constants.JDBC_TABLE);
+
+ DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);
SqlDialect jdbcDialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, ds);
JdbcConvention jc = JdbcConvention.of(jdbcDialect, null, dataBaseType);
JdbcSchema schema = new JdbcSchema(ds, jc.dialect, jc, null/*catalog */, null/*schema */);
@@ -2915,7 +2914,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
JdbcHiveTableScan jdbcTableRel = new JdbcHiveTableScan(cluster, optTable, jt, jc, hts);
tableRel = new HiveJdbcConverter(cluster, jdbcTableRel.getTraitSet().replace(HiveRelNode.CONVENTION),
- jdbcTableRel, jc);
+ jdbcTableRel, jc, url, user);
}
} else {
// Build row type from field <type, name>
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/test/queries/clientpositive/external_jdbc_table2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/external_jdbc_table2.q b/ql/src/test/queries/clientpositive/external_jdbc_table2.q
new file mode 100644
index 0000000..cc4466e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/external_jdbc_table2.q
@@ -0,0 +1,128 @@
+--! qt:dataset:src
+
+CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
+
+FROM src
+SELECT
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;create=true','user1','passwd1',
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+limit 1;
+
+FROM src
+SELECT
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;create=true','user2','passwd2',
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1;
+
+FROM src
+SELECT
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;create=true','user1','passwd1',
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1;
+
+
+
+CREATE EXTERNAL TABLE db1_ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+ "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;collation=TERRITORY_BASED:PRIMARY",
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+ "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE EXTERNAL TABLE db2_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+ "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;collation=TERRITORY_BASED:PRIMARY",
+ "hive.sql.dbcp.username" = "user2",
+ "hive.sql.dbcp.password" = "passwd2",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE EXTERNAL TABLE db1_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+ "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;collation=TERRITORY_BASED:PRIMARY",
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+);
+
+
+SELECT * FROM db1_ext_auth1;
+
+SELECT * FROM db2_ext_auth2;
+
+SELECT * FROM db1_ext_auth2;
+
+EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey;
+
+SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey;
+
+EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey;
+
+SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey;
+
+EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2;
+
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2;
+
+EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2;
+
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2;
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/test/results/clientpositive/llap/external_jdbc_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_table.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_table.q.out
index 0849170..1bb41dd 100644
--- a/ql/src/test/results/clientpositive/llap/external_jdbc_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/external_jdbc_table.q.out
@@ -233,7 +233,7 @@ POSTHOOK: Input: default@ext_simple_derby_table1
20.0 20 20.0 20
20.0 20 20.0 20
74.0 100 65.0 15
-330.76 44 455.454 53
+330.76 44 455.4540100097656 53
PREHOOK: query: select count(*) from ext_simple_derby_table1
PREHOOK: type: QUERY
PREHOOK: Input: default@ext_simple_derby_table1
@@ -304,12 +304,12 @@ STAGE PLANS:
TableScan
alias: ext_simple_derby_table1
properties:
- hive.sql.generated.query SELECT "bkey"
-FROM "SIMPLE_DERBY_TABLE1"
-WHERE 100 < "ikey"
hive.sql.query SELECT "bkey"
FROM "SIMPLE_DERBY_TABLE1"
WHERE 100 < "ikey"
+ hive.sql.query.fieldNames bkey
+ hive.sql.query.fieldTypes bigint
+ hive.sql.query.split true
Select Operator
expressions: bkey (type: bigint)
outputColumnNames: _col0
@@ -381,13 +381,6 @@ STAGE PLANS:
TableScan
alias: ext_simple_derby_table1
properties:
- hive.sql.generated.query SELECT "t"."fkey", "t0"."dkey"
-FROM (SELECT *
-FROM "SIMPLE_DERBY_TABLE1"
-WHERE "ikey" IS NOT NULL) AS "t"
-INNER JOIN (SELECT *
-FROM "SIMPLE_DERBY_TABLE2"
-WHERE "ikey" IS NOT NULL) AS "t0" ON "t"."ikey" = "t0"."ikey"
hive.sql.query SELECT "t"."fkey", "t0"."dkey"
FROM (SELECT *
FROM "SIMPLE_DERBY_TABLE1"
@@ -395,6 +388,9 @@ WHERE "ikey" IS NOT NULL) AS "t"
INNER JOIN (SELECT *
FROM "SIMPLE_DERBY_TABLE2"
WHERE "ikey" IS NOT NULL) AS "t0" ON "t"."ikey" = "t0"."ikey"
+ hive.sql.query.fieldNames fkey,dkey
+ hive.sql.query.fieldTypes float,double
+ hive.sql.query.split false
Select Operator
expressions: fkey (type: float), dkey (type: double)
outputColumnNames: _col0, _col1
@@ -464,12 +460,12 @@ STAGE PLANS:
TableScan
alias: ext_simple_derby_table2
properties:
- hive.sql.generated.query SELECT *
-FROM "SIMPLE_DERBY_TABLE2"
-WHERE "ikey" IS NOT NULL
- hive.sql.query SELECT *
+ hive.sql.query SELECT "ikey", "bkey", "fkey", "dkey"
FROM "SIMPLE_DERBY_TABLE2"
WHERE "ikey" IS NOT NULL
+ hive.sql.query.fieldNames ikey,bkey,fkey,dkey
+ hive.sql.query.fieldTypes int,bigint,float,double
+ hive.sql.query.split true
Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: ikey (type: int)
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/test/results/clientpositive/llap/external_jdbc_table2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_table2.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_table2.q.out
new file mode 100644
index 0000000..e56a221
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/external_jdbc_table2.q.out
@@ -0,0 +1,559 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: dboutput
+POSTHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: dboutput
+PREHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0 0 0 0 0
+PREHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0 0 0 0 0
+PREHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+SELECT
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0 0 0 0 0
+PREHOOK: query: CREATE EXTERNAL TABLE db1_ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@db1_ext_auth1
+POSTHOOK: query: CREATE EXTERNAL TABLE db1_ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@db1_ext_auth1
+PREHOOK: query: CREATE EXTERNAL TABLE db2_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user2",
+ "hive.sql.dbcp.password" = "passwd2",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@db2_ext_auth2
+POSTHOOK: query: CREATE EXTERNAL TABLE db2_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user2",
+ "hive.sql.dbcp.password" = "passwd2",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@db2_ext_auth2
+PREHOOK: query: CREATE EXTERNAL TABLE db1_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@db1_ext_auth2
+POSTHOOK: query: CREATE EXTERNAL TABLE db1_ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+ "hive.sql.database.type" = "DERBY",
+ "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+ "hive.sql.dbcp.username" = "user1",
+ "hive.sql.dbcp.password" = "passwd1",
+ "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+ "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@db1_ext_auth2
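Note how the three external tables are laid out: db1_ext_auth1 and db1_ext_auth2 point at the same database with the same credentials (user1/passwd1), while db2_ext_auth2 uses a different database and user. That is precisely the distinction the pushdown rules need: a join or union of two JDBC scans can only be fused into one remote query when both scans resolve to the same connection. A hypothetical version of that check, assuming the decision keys on the connection-defining table properties (the URLs below are placeholders, since the real ones are masked in this output):

    import java.util.Map;
    import java.util.Objects;

    // Hypothetical same-datasource test; the actual JDBCJoinPushDownRule /
    // JDBCUnionPushDownRule logic may compare more (or fewer) properties.
    public class SameSource {
      private static final String[] KEYS = {
          "hive.sql.jdbc.url", "hive.sql.jdbc.driver", "hive.sql.dbcp.username"
      };

      static boolean sameDataSource(Map<String, String> a, Map<String, String> b) {
        for (String k : KEYS) {
          if (!Objects.equals(a.get(k), b.get(k))) {
            return false;               // different connection: keep the join in Hive
          }
        }
        return true;                    // same connection: eligible for pushdown
      }

      public static void main(String[] args) {
        Map<String, String> db1a = Map.of("hive.sql.jdbc.url", "jdbc:derby:db1",
            "hive.sql.jdbc.driver", "org.apache.derby.jdbc.EmbeddedDriver",
            "hive.sql.dbcp.username", "user1");
        Map<String, String> db1b = Map.of("hive.sql.jdbc.url", "jdbc:derby:db1",
            "hive.sql.jdbc.driver", "org.apache.derby.jdbc.EmbeddedDriver",
            "hive.sql.dbcp.username", "user1");
        Map<String, String> db2 = Map.of("hive.sql.jdbc.url", "jdbc:derby:db2",
            "hive.sql.jdbc.driver", "org.apache.derby.jdbc.EmbeddedDriver",
            "hive.sql.dbcp.username", "user2");
        System.out.println(sameDataSource(db1a, db1b));  // true  -> pushable
        System.out.println(sameDataSource(db1a, db2));   // false -> joined in Hive
      }
    }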
+PREHOOK: query: SELECT * FROM db1_ext_auth1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+#### A masked pattern was here ####
+20 20 20.0 20.0
+-20 -20 -20.0 -20.0
+100 -15 65.0 -74.0
+44 53 -455.454 330.76
+PREHOOK: query: SELECT * FROM db2_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db2_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+20 20 20.0 20.0
+-20 8 9.0 11.0
+101 -16 66.0 -75.0
+40 50 -455.4543 330.767
+PREHOOK: query: SELECT * FROM db1_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+20 20 20.0 20.0
+-20 8 9.0 11.0
+101 -16 66.0 -75.0
+40 50 -455.4543 330.767
+PREHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: db1_ext_auth1
+ properties:
+ hive.sql.query SELECT "ikey", "bkey", "fkey", "dkey"
+FROM "SIMPLE_DERBY_TABLE1"
+WHERE "ikey" IS NOT NULL
+ hive.sql.query.fieldNames ikey,bkey,fkey,dkey
+ hive.sql.query.fieldTypes int,bigint,float,double
+ hive.sql.query.split true
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: ikey (type: int)
+ sort order: +
+ Map-reduce partition columns: ikey (type: int)
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ value expressions: bkey (type: bigint), fkey (type: float), dkey (type: double)
+ Execution mode: vectorized, llap
+ LLAP IO: no inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: db2_ext_auth2
+ properties:
+ hive.sql.query SELECT "ikey", "bkey", "fkey", "dkey"
+FROM "SIMPLE_DERBY_TABLE2"
+WHERE "ikey" IS NOT NULL
+ hive.sql.query.fieldNames ikey,bkey,fkey,dkey
+ hive.sql.query.fieldTypes int,bigint,float,double
+ hive.sql.query.split true
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: ikey (type: int)
+ sort order: +
+ Map-reduce partition columns: ikey (type: int)
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ value expressions: bkey (type: bigint), fkey (type: float), dkey (type: double)
+ Execution mode: vectorized, llap
+ LLAP IO: no inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 ikey (type: int)
+ 1 ikey (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col8, _col9, _col10
+ Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: float), _col3 (type: double), _col7 (type: int), _col8 (type: bigint), _col9 (type: float), _col10 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+ Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth1 JOIN db2_ext_auth2 ON db1_ext_auth1.ikey = db2_ext_auth2.ikey
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+-20 -20 -20.0 -20.0 -20 8 9.0 11.0
+20 20 20.0 20.0 20 20 20.0 20.0
+PREHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: db1_ext_auth1
+ properties:
+ hive.sql.query SELECT "t"."ikey", "t"."bkey", "t"."fkey", "t"."dkey", "t0"."ikey" AS "ikey0", "t0"."bkey" AS "bkey0", "t0"."fkey" AS "fkey0", "t0"."dkey" AS "dkey0"
+FROM (SELECT *
+FROM "SIMPLE_DERBY_TABLE1"
+WHERE "ikey" IS NOT NULL) AS "t"
+INNER JOIN (SELECT *
+FROM "SIMPLE_DERBY_TABLE2"
+WHERE "ikey" IS NOT NULL) AS "t0" ON "t"."ikey" = "t0"."ikey"
+ hive.sql.query.fieldNames ikey,bkey,fkey,dkey,ikey0,bkey0,fkey0,dkey0
+ hive.sql.query.fieldTypes int,bigint,float,double,int,bigint,float,double
+ hive.sql.query.split false
+ Select Operator
+ expressions: ikey (type: int), bkey (type: bigint), fkey (type: float), dkey (type: double), ikey0 (type: int), bkey0 (type: bigint), fkey0 (type: float), dkey0 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+ ListSink
+
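Contrast this plan with the previous one: because both sides share a datasource, the join collapses into a single Fetch stage with no Tez vertices, and hive.sql.query.split is false, so the fused query runs over one connection instead of in parallel. A trivial illustration of the consequence for split planning, hypothetical rather than the real accounting in JdbcInputFormat:

    import java.util.Properties;

    public class SplitCount {
      // Hypothetical: a non-splittable pushed query always yields one split,
      // trading cluster parallelism for running the join inside the database.
      static int numSplits(Properties props, int requested) {
        return "false".equals(props.getProperty("hive.sql.query.split")) ? 1 : requested;
      }

      public static void main(String[] args) {
        Properties p = new Properties();
        p.setProperty("hive.sql.query.split", "false");
        System.out.println(numSplits(p, 4));   // prints 1
      }
    }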
+PREHOOK: query: SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth1 JOIN db1_ext_auth2 ON db1_ext_auth1.ikey = db1_ext_auth2.ikey
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+20 20 20.0 20.0 20 20 20.0 20.0
+-20 -20 -20.0 -20.0 -20 8 9.0 11.0
+PREHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Union 2 (CONTAINS)
+ Map 3 <- Union 2 (CONTAINS)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: db1_ext_auth1
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: ikey (type: int), bkey (type: bigint), fkey (type: float), dkey (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: vectorized, llap
+ LLAP IO: no inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: db2_ext_auth2
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: ikey (type: int), bkey (type: bigint), fkey (type: float), dkey (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 48 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: vectorized, llap
+ LLAP IO: no inputs
+ Union 2
+ Vertex: Union 2
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db2_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db2_ext_auth2
+#### A masked pattern was here ####
+20 20 20.0 20.0
+-20 -20 -20.0 -20.0
+100 -15 65.0 -74.0
+44 53 -455.454 330.76
+20 20 20.0 20.0
+-20 8 9.0 11.0
+101 -16 66.0 -75.0
+40 50 -455.4543 330.767
+PREHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN
+SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: db1_ext_auth1
+ properties:
+ hive.sql.query SELECT "ikey", "bkey", "fkey", "dkey"
+FROM (SELECT *
+FROM "SIMPLE_DERBY_TABLE1"
+UNION ALL
+SELECT *
+FROM "SIMPLE_DERBY_TABLE2") AS "t"
+ hive.sql.query.fieldNames ikey,bkey,fkey,dkey
+ hive.sql.query.fieldTypes int,bigint,float,double
+ hive.sql.query.split false
+ Select Operator
+ expressions: ikey (type: int), bkey (type: bigint), fkey (type: float), dkey (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@db1_ext_auth1
+PREHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM db1_ext_auth1 UNION ALL SELECT * FROM db1_ext_auth2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@db1_ext_auth1
+POSTHOOK: Input: default@db1_ext_auth2
+#### A masked pattern was here ####
+20 20 20.0 20.0
+-20 -20 -20.0 -20.0
+100 -15 65.0 -74.0
+44 53 -455.454 330.76
+20 20 20.0 20.0
+-20 8 9.0 11.0
+101 -16 66.0 -75.0
+40 50 -455.4543 330.767
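The same pattern repeats for UNION ALL: across different datasources the union stays a two-vertex Tez plan, while over the same datasource both branch queries are fused into a single statement of the shape shown in the plan above. A hypothetical string-level sketch of that fusion, shown only to make the generated-SQL shape concrete; the real rewrite is performed by JDBCUnionPushDownRule through Calcite's SQL generation, not by concatenation:

    // Hypothetical illustration of union fusion into one remote query.
    public class UnionFusion {
      static String fuse(String projection, String left, String right) {
        return "SELECT " + projection + "\nFROM (" + left
            + "\nUNION ALL\n" + right + ") AS \"t\"";
      }

      public static void main(String[] args) {
        System.out.println(fuse("\"ikey\", \"bkey\", \"fkey\", \"dkey\"",
            "SELECT *\nFROM \"SIMPLE_DERBY_TABLE1\"",
            "SELECT *\nFROM \"SIMPLE_DERBY_TABLE2\""));
      }
    }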
http://git-wip-us.apache.org/repos/asf/hive/blob/a4b087b1/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
index 261689d..b46909d 100644
--- a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -98,12 +98,12 @@ STAGE PLANS:
TableScan
alias: ext_simple_derby_table
properties:
- hive.sql.generated.query SELECT *
-FROM "SIMPLE_DERBY_TABLE"
-WHERE 100 < "kkey"
- hive.sql.query SELECT *
+ hive.sql.query SELECT "kkey"
FROM "SIMPLE_DERBY_TABLE"
WHERE 100 < "kkey"
+ hive.sql.query.fieldNames kkey
+ hive.sql.query.fieldTypes int
+ hive.sql.query.split true
Select Operator
expressions: kkey (type: int)
outputColumnNames: _col0