Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/27 03:26:37 UTC
svn commit: r1526766 [1/2] - in /hive/branches/vectorization: ./ beeline/src/java/org/apache/hive/beeline/ bin/ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/exported_table/ data/files/exported_table/data/ eclipse-templates/ hcatalog/co...
Author: hashutosh
Date: Fri Sep 27 01:26:36 2013
New Revision: 1526766
URL: http://svn.apache.org/r1526766
Log:
Merged with latest trunk (r1526185:1526765)
Added:
hive/branches/vectorization/data/files/exported_table/
- copied from r1526765, hive/trunk/data/files/exported_table/
hive/branches/vectorization/data/files/exported_table/_metadata
- copied unchanged from r1526765, hive/trunk/data/files/exported_table/_metadata
hive/branches/vectorization/data/files/exported_table/data/
- copied from r1526765, hive/trunk/data/files/exported_table/data/
hive/branches/vectorization/data/files/exported_table/data/data
- copied unchanged from r1526765, hive/trunk/data/files/exported_table/data/data
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
- copied unchanged from r1526765, hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
hive/branches/vectorization/ql/src/test/queries/clientpositive/import_exported_table.q
- copied unchanged from r1526765, hive/trunk/ql/src/test/queries/clientpositive/import_exported_table.q
hive/branches/vectorization/ql/src/test/results/clientpositive/import_exported_table.q.out
- copied unchanged from r1526765, hive/trunk/ql/src/test/results/clientpositive/import_exported_table.q.out
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
- copied unchanged from r1526765, hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/test/org/apache/hive/service/cli/thrift/TestThriftBinaryCLIService.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
- copied unchanged from r1526765, hive/trunk/service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
- copied unchanged from r1526765, hive/trunk/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
Removed:
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftCLIService.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/thrift/TestThriftCLIService.java
Modified:
hive/branches/vectorization/ (props changed)
hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java
hive/branches/vectorization/bin/hive
hive/branches/vectorization/build-common.xml
hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/branches/vectorization/conf/hive-default.xml.template
hive/branches/vectorization/eclipse-templates/.classpath
hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/queue.xml
hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
hive/branches/vectorization/ivy/libraries.properties
hive/branches/vectorization/jdbc/ivy.xml
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java
hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
hive/branches/vectorization/service/src/java/org/apache/hive/service/server/HiveServer2.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
hive/branches/vectorization/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
Propchange: hive/branches/vectorization/
------------------------------------------------------------------------------
Merged /hive/trunk:r1526185-1526765
Modified: hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/vectorization/beeline/src/java/org/apache/hive/beeline/BeeLine.java Fri Sep 27 01:26:36 2013
@@ -502,7 +502,7 @@ public class BeeLine {
for (int i = 0; i < args.length; i++) {
if (args[i].equals("--help") || args[i].equals("-h")) {
- usage();
+ // Return false here, so usage will be printed.
return false;
}
Modified: hive/branches/vectorization/bin/hive
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/bin/hive?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/bin/hive (original)
+++ hive/branches/vectorization/bin/hive Fri Sep 27 01:26:36 2013
@@ -117,7 +117,7 @@ elif [ "${HIVE_AUX_JARS_PATH}" != "" ];
fi
AUX_CLASSPATH=${HIVE_AUX_JARS_PATH}
AUX_PARAM=file://${HIVE_AUX_JARS_PATH}
- AUX_PARAM=`echo $AUX_PARAM | sed 's/,/,file:\/\//g'`
+ AUX_PARAM=`echo $AUX_PARAM | sed 's/:/,file:\/\//g'`
fi
# adding jars from auxlib directory
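
For context: HIVE_AUX_JARS_PATH is a colon-separated list of jar paths (classpath style), while AUX_PARAM must be a comma-separated list of file:// URLs, so the sed expression above now splits on ':' rather than ','. A minimal Java sketch of the same rewrite (the input path is hypothetical):

    public class AuxParamRewrite {
      public static void main(String[] args) {
        // Hypothetical input; HIVE_AUX_JARS_PATH is colon-separated like a classpath.
        String hiveAuxJarsPath = "/opt/jars/a.jar:/opt/jars/b.jar";
        // Same rewrite the fixed sed performs: prefix the whole value with file://
        // and turn each ':' separator into ",file://".
        String auxParam = "file://" + hiveAuxJarsPath.replace(":", ",file://");
        System.out.println(auxParam); // file:///opt/jars/a.jar,file:///opt/jars/b.jar
      }
    }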
Modified: hive/branches/vectorization/build-common.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/build-common.xml?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/build-common.xml (original)
+++ hive/branches/vectorization/build-common.xml Fri Sep 27 01:26:36 2013
@@ -59,7 +59,7 @@
<property name="test.output" value="true"/>
<property name="test.junit.output.format" value="xml"/>
<property name="test.junit.output.usefile" value="true"/>
- <property name="minimr.query.files" value="list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q"/>
+ <property name="minimr.query.files" value="list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q"/>
<property name="minimr.query.negative.files" value="cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q" />
<property name="test.silent" value="true"/>
<property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>
Modified: hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Sep 27 01:26:36 2013
@@ -739,6 +739,19 @@ public class HiveConf extends Configurat
HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
+ // binary or http
+ HIVE_SERVER2_TRANSPORT_MODE("hive.server2.transport.mode", "binary"),
+
+ // http (over thrift) transport settings
+ HIVE_SERVER2_THRIFT_HTTP_PORT("hive.server2.thrift.http.port", 10001),
+ HIVE_SERVER2_THRIFT_HTTP_PATH("hive.server2.thrift.http.path", "cliservice"),
+ HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS("hive.server2.thrift.http.min.worker.threads", 5),
+ HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads", 500),
+
+ // binary transport settings
+ HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
+ HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+ HIVE_SERVER2_THRIFT_SASL_QOP("hive.server2.thrift.sasl.qop", "auth"),
HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 500),
@@ -748,10 +761,6 @@ public class HiveConf extends Configurat
// Number of seconds HiveServer2 shutdown will wait for async threads to terminate
HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10),
- HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
- HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
- HIVE_SERVER2_THRIFT_SASL_QOP("hive.server2.thrift.sasl.qop", "auth"),
-
// HiveServer2 auth configuration
HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE"),
@@ -1273,6 +1282,6 @@ public class HiveConf extends Configurat
} else {
return Integer.parseInt(m.group(1));
}
-
}
+
}
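
The ConfVars entries added above are read through HiveConf's typed getters. A minimal sketch of how a server component might consult them (class name and output are illustrative, not from this commit):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class TransportModeCheck {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Defaults come from the definitions above: "binary", 10001, "cliservice".
        String mode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
        if ("http".equalsIgnoreCase(mode)) {
          int port = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
          String path = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH);
          System.out.println("HTTP mode on port " + port + " at /" + path);
        }
      }
    }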
Modified: hive/branches/vectorization/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/conf/hive-default.xml.template?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/conf/hive-default.xml.template (original)
+++ hive/branches/vectorization/conf/hive-default.xml.template Fri Sep 27 01:26:36 2013
@@ -877,7 +877,36 @@
<description>Read from a binary stream and treat each hive.binary.record.max.length bytes as a record.
The last record before the end of stream can have less than hive.binary.record.max.length bytes</description>
</property>
+
+<property>
+ <name>hive.server2.transport.mode</name>
+ <value>binary</value>
+ <description>Server transport mode. "binary" or "http".</description>
+</property>
+
+<property>
+ <name>hive.server2.thrift.http.port</name>
+ <value>10001</value>
+ <description>Port number when in HTTP mode.</description>
+</property>
+
+<property>
+ <name>hive.server2.thrift.http.path</name>
+ <value>cliservice</value>
+ <description>Path component of URL endpoint when in HTTP mode.</description>
+</property>
+
+<property>
+ <name>hive.server2.thrift.http.min.worker.threads</name>
+ <value>5</value>
+ <description>Minimum number of worker threads when in HTTP mode.</description>
+</property>
+<property>
+ <name>hive.server2.thrift.http.max.worker.threads</name>
+ <value>500</value>
+ <description>Maximum number of worker threads when in HTTP mode.</description>
+</property>
<property>
<name>hive.script.recordreader</name>
@@ -1755,6 +1784,8 @@
</description>
</property>
+
+
<property>
<name>hive.hmshandler.retry.attempts</name>
<value>1</value>
Modified: hive/branches/vectorization/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/eclipse-templates/.classpath?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/eclipse-templates/.classpath (original)
+++ hive/branches/vectorization/eclipse-templates/.classpath Fri Sep 27 01:26:36 2013
@@ -75,6 +75,8 @@
<classpathentry kind="lib" path="build/ivy/lib/default/commons-collections-@commons-collections.version@.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/default/bonecp-@BoneCP.version@.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/default/commons-pool-@commons-pool.version@.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/default/httpcore-@httpcore.version@.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/default/httpclient-@httpclient.version@.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/default/slf4j-api-@slf4j-api.version@.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/default/slf4j-log4j12-@slf4j-log4j12.version@.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/default/JavaEWAH-@javaewah.version@.jar"/>
Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java Fri Sep 27 01:26:36 2013
@@ -450,10 +450,10 @@ public class HCatUtil {
public static Map<String, String>
getInputJobProperties(HCatStorageHandler storageHandler,
InputJobInfo inputJobInfo) {
- Properties props = inputJobInfo.getTableInfo().getStorerInfo().getProperties();
- props.put(serdeConstants.SERIALIZATION_LIB,storageHandler.getSerDeClass().getName());
- TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
- storageHandler.getOutputFormatClass(),props);
+ Properties props = inputJobInfo.getTableInfo().getStorerInfo().getProperties();
+ props.put(serdeConstants.SERIALIZATION_LIB,storageHandler.getSerDeClass().getName());
+ TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
+ storageHandler.getOutputFormatClass(),props);
if (tableDesc.getJobProperties() == null) {
tableDesc.setJobProperties(new HashMap<String, String>());
}
Modified: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Fri Sep 27 01:26:36 2013
@@ -483,9 +483,9 @@ public class HCatUtil {
OutputJobInfo outputJobInfo) {
//TODO replace IgnoreKeyTextOutputFormat with a
//HiveOutputFormatWrapper in StorageHandler
- Properties props = outputJobInfo.getTableInfo().getStorerInfo().getProperties();
- props.put(serdeConstants.SERIALIZATION_LIB,storageHandler.getSerDeClass().getName());
- TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
+ Properties props = outputJobInfo.getTableInfo().getStorerInfo().getProperties();
+ props.put(serdeConstants.SERIALIZATION_LIB,storageHandler.getSerDeClass().getName());
+ TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
IgnoreKeyTextOutputFormat.class,props);
if (tableDesc.getJobProperties() == null)
tableDesc.setJobProperties(new HashMap<String, String>());
Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/queue.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/queue.xml?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/queue.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/queue.xml Fri Sep 27 01:26:36 2013
@@ -143,7 +143,17 @@
"exitValue": 0,
"user": "ctdean",
"callback": null,
- "completed": "done"
+ "completed": "done",
+ "userargs" => {
+ "callback" => null,
+ "define" => [],
+ "enablelog" => "false",
+ "execute" => "select a,rand(b) from mynums",
+ "file" => null,
+ "files" => [],
+ "statusdir" => null,
+ "user.name" => "hadoopqa",
+ },
}
</source>
</section>
Modified: hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java (original)
+++ hive/branches/vectorization/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java Fri Sep 27 01:26:36 2013
@@ -229,7 +229,7 @@ public class TestHCatHBaseInputFormat ex
// Note: These asserts only work in case of LocalJobRunner as they run in the same jvm.
// If using MiniMRCluster, the tests will have to be modified.
assertFalse(MapReadHTable.error);
- assertEquals(MapReadHTable.count, 1);
+ assertEquals(1, MapReadHTable.count);
String dropTableQuery = "DROP TABLE " + hbaseTableName;
CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
@@ -291,7 +291,7 @@ public class TestHCatHBaseInputFormat ex
job.setNumReduceTasks(0);
assertTrue(job.waitForCompletion(true));
assertFalse(MapReadProjHTable.error);
- assertEquals(MapReadProjHTable.count, 1);
+ assertEquals(1, MapReadProjHTable.count);
String dropTableQuery = "DROP TABLE " + tableName;
CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
@@ -325,7 +325,7 @@ public class TestHCatHBaseInputFormat ex
HCatUtil.serialize(getHiveConf().getAllProperties()));
// output settings
- Path outputDir = new Path(getTestDir(), "mapred/testHBaseTableProjectionReadMR");
+ Path outputDir = new Path(getTestDir(), "mapred/testHBaseInputFormatProjectionReadMR");
FileSystem fs = getFileSystem();
if (fs.exists(outputDir)) {
fs.delete(outputDir, true);
@@ -361,8 +361,8 @@ public class TestHCatHBaseInputFormat ex
RunningJob runJob = JobClient.runJob(job);
runJob.waitForCompletion();
assertTrue(runJob.isSuccessful());
- assertFalse(MapReadProjHTable.error);
- assertEquals(MapReadProjHTable.count, 1);
+ assertFalse(MapReadProjectionHTable.error);
+ assertEquals(1, MapReadProjectionHTable.count);
String dropTableQuery = "DROP TABLE " + tableName;
CommandProcessorResponse responseThree = hcatDriver.run(dropTableQuery);
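
The assertEquals changes above swap the arguments into JUnit's (expected, actual) order so that failure messages read correctly. A minimal illustration (the count value is made up):

    import static org.junit.Assert.assertEquals;

    public class AssertOrderExample {
      public static void main(String[] args) {
        int count = 0; // stand-in for MapReadHTable.count
        // With the expected value first, a failure prints
        // "expected:<1> but was:<0>" rather than the reverse.
        assertEquals(1, count);
      }
    }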
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Fri Sep 27 01:26:36 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -40,7 +41,7 @@ public class HiveDelegator extends Launc
super(appConf);
}
- public EnqueueBean run(String user,
+ public EnqueueBean run(String user, Map<String, Object> userArgs,
String execute, String srcFile, List<String> defines,
List<String> hiveArgs, String otherFiles,
String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -51,7 +52,7 @@ public class HiveDelegator extends Launc
List<String> args = makeArgs(execute, srcFile, defines, hiveArgs, otherFiles, statusdir,
completedUrl, enablelog);
- return enqueueController(user, callback, args);
+ return enqueueController(user, userArgs, callback, args);
}
private List<String> makeArgs(String execute, String srcFile,
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java Fri Sep 27 01:26:36 2013
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -38,7 +39,7 @@ public class JarDelegator extends Launch
super(appConf);
}
- public EnqueueBean run(String user, String jar, String mainClass,
+ public EnqueueBean run(String user, Map<String, Object> userArgs, String jar, String mainClass,
String libjars, String files,
List<String> jarArgs, List<String> defines,
String statusdir, String callback, String completedUrl,
@@ -50,7 +51,7 @@ public class JarDelegator extends Launch
libjars, files, jarArgs, defines,
statusdir, completedUrl, enablelog, jobType);
- return enqueueController(user, callback, args);
+ return enqueueController(user, userArgs, callback, args);
}
private List<String> makeArgs(String jar, String mainClass,
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java Fri Sep 27 01:26:36 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.logging.Log;
@@ -49,13 +50,15 @@ public class LauncherDelegator extends T
super(appConf);
}
- public void registerJob(String id, String user, String callback)
+ public void registerJob(String id, String user, String callback,
+ Map<String, Object> userArgs)
throws IOException {
JobState state = null;
try {
state = new JobState(id, Main.getAppConfigInstance());
state.setUser(user);
state.setCallback(callback);
+ state.setUserArgs(userArgs);
} finally {
if (state != null)
state.close();
@@ -65,7 +68,7 @@ public class LauncherDelegator extends T
/**
* Enqueue the TempletonControllerJob directly calling doAs.
*/
- public EnqueueBean enqueueController(String user, String callback,
+ public EnqueueBean enqueueController(String user, Map<String, Object> userArgs, String callback,
List<String> args)
throws NotAuthorizedException, BusyException, ExecuteException,
IOException, QueueException {
@@ -82,7 +85,7 @@ public class LauncherDelegator extends T
if (id == null)
throw new QueueException("Unable to get job id");
- registerJob(id, user, callback);
+ registerJob(id, user, callback, userArgs);
return new EnqueueBean(id);
} catch (InterruptedException e) {
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java Fri Sep 27 01:26:36 2013
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
@@ -39,7 +40,7 @@ public class PigDelegator extends Launch
super(appConf);
}
- public EnqueueBean run(String user,
+ public EnqueueBean run(String user, Map<String, Object> userArgs,
String execute, String srcFile,
List<String> pigArgs, String otherFiles,
String statusdir, String callback, String completedUrl, boolean enablelog)
@@ -50,7 +51,7 @@ public class PigDelegator extends Launch
srcFile, pigArgs,
otherFiles, statusdir, completedUrl, enablelog);
- return enqueueController(user, callback, args);
+ return enqueueController(user, userArgs, callback, args);
}
private List<String> makeArgs(String execute, String srcFile,
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Fri Sep 27 01:26:36 2013
@@ -19,6 +19,7 @@
package org.apache.hive.hcatalog.templeton;
import java.io.IOException;
+import java.util.Map;
import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.JobProfile;
@@ -38,6 +39,7 @@ public class QueueStatusBean {
public String user;
public String callback;
public String completed;
+ public Map<String, Object> userargs;
public QueueStatusBean() {
}
@@ -63,5 +65,6 @@ public class QueueStatusBean {
user = state.getUser();
callback = state.getCallback();
completed = state.getCompleteStatus();
+ userargs = state.getUserArgs();
}
}
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Fri Sep 27 01:26:36 2013
@@ -601,10 +601,23 @@ public class Server {
verifyParam(mapper, "mapper");
verifyParam(reducer, "reducer");
+ Map<String, Object> userArgs = new HashMap<String, Object>();
+ userArgs.put("user.name", getDoAsUser());
+ userArgs.put("input", inputs);
+ userArgs.put("output", output);
+ userArgs.put("mapper", mapper);
+ userArgs.put("reducer", reducer);
+ userArgs.put("files", files);
+ userArgs.put("define", defines);
+ userArgs.put("cmdenv", cmdenvs);
+ userArgs.put("arg", args);
+ userArgs.put("statusdir", statusdir);
+ userArgs.put("callback", callback);
+ userArgs.put("enablelog", Boolean.toString(enablelog));
checkEnableLogPrerequisite(enablelog, statusdir);
StreamingDelegator d = new StreamingDelegator(appConf);
- return d.run(getDoAsUser(), inputs, output, mapper, reducer,
+ return d.run(getDoAsUser(), userArgs, inputs, output, mapper, reducer,
files, defines, cmdenvs, args,
statusdir, callback, getCompletedUrl(), enablelog, JobType.STREAMING);
}
@@ -630,10 +643,22 @@ public class Server {
verifyParam(jar, "jar");
verifyParam(mainClass, "class");
+ Map<String, Object> userArgs = new HashMap<String, Object>();
+ userArgs.put("user.name", getDoAsUser());
+ userArgs.put("jar", jar);
+ userArgs.put("class", mainClass);
+ userArgs.put("libjars", libjars);
+ userArgs.put("files", files);
+ userArgs.put("arg", args);
+ userArgs.put("define", defines);
+ userArgs.put("statusdir", statusdir);
+ userArgs.put("callback", callback);
+ userArgs.put("enablelog", Boolean.toString(enablelog));
+
checkEnableLogPrerequisite(enablelog, statusdir);
JarDelegator d = new JarDelegator(appConf);
- return d.run(getDoAsUser(),
+ return d.run(getDoAsUser(), userArgs,
jar, mainClass,
libjars, files, args, defines,
statusdir, callback, getCompletedUrl(), enablelog, JobType.JAR);
@@ -658,10 +683,21 @@ public class Server {
if (execute == null && srcFile == null)
throw new BadParam("Either execute or file parameter required");
+ //add all function arguments to a map
+ Map<String, Object> userArgs = new HashMap<String, Object>();
+ userArgs.put("user.name", getDoAsUser());
+ userArgs.put("execute", execute);
+ userArgs.put("file", srcFile);
+ userArgs.put("arg", pigArgs);
+ userArgs.put("files", otherFiles);
+ userArgs.put("statusdir", statusdir);
+ userArgs.put("callback", callback);
+ userArgs.put("enablelog", Boolean.toString(enablelog));
+
checkEnableLogPrerequisite(enablelog, statusdir);
PigDelegator d = new PigDelegator(appConf);
- return d.run(getDoAsUser(),
+ return d.run(getDoAsUser(), userArgs,
execute, srcFile,
pigArgs, otherFiles,
statusdir, callback, getCompletedUrl(), enablelog);
@@ -699,10 +735,21 @@ public class Server {
if (execute == null && srcFile == null)
throw new BadParam("Either execute or file parameter required");
+ //add all function arguments to a map
+ Map<String, Object> userArgs = new HashMap<String, Object>();
+ userArgs.put("user.name", getDoAsUser());
+ userArgs.put("execute", execute);
+ userArgs.put("file", srcFile);
+ userArgs.put("define", defines);
+ userArgs.put("files", otherFiles);
+ userArgs.put("statusdir", statusdir);
+ userArgs.put("callback", callback);
+ userArgs.put("enablelog", Boolean.toString(enablelog));
+
checkEnableLogPrerequisite(enablelog, statusdir);
HiveDelegator d = new HiveDelegator(appConf);
- return d.run(getDoAsUser(), execute, srcFile, defines, hiveArgs, otherFiles,
+ return d.run(getDoAsUser(), userArgs, execute, srcFile, defines, hiveArgs, otherFiles,
statusdir, callback, getCompletedUrl(), enablelog);
}
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java Fri Sep 27 01:26:36 2013
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.templet
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import org.apache.commons.exec.ExecuteException;
@@ -35,7 +36,7 @@ public class StreamingDelegator extends
super(appConf);
}
- public EnqueueBean run(String user,
+ public EnqueueBean run(String user, Map<String, Object> userArgs,
List<String> inputs, String output,
String mapper, String reducer,
List<String> files, List<String> defines,
@@ -52,7 +53,7 @@ public class StreamingDelegator extends
files, defines, cmdenvs, jarArgs);
JarDelegator d = new JarDelegator(appConf);
- return d.run(user,
+ return d.run(user, userArgs,
appConf.streamingJar(), null,
null, null, args, defines,
statusdir, callback, completedUrl, enableLog, jobType);
Modified: hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java (original)
+++ hive/branches/vectorization/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java Fri Sep 27 01:26:36 2013
@@ -21,10 +21,12 @@ package org.apache.hive.hcatalog.templet
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hive.hcatalog.templeton.JsonBuilder;
/**
* The persistent state of a job. The state is stored in one of the
@@ -232,6 +234,20 @@ public class JobState {
setField("user", user);
}
+ @SuppressWarnings("unchecked")
+ public Map<String, Object> getUserArgs()
+ throws IOException
+ {
+ String jsonString = getField("userArgs");
+ return (Map<String, Object>)JsonBuilder.jsonToMap(jsonString);
+ }
+ public void setUserArgs(Map<String, Object> userArgs)
+ throws IOException
+ {
+ String jsonString = JsonBuilder.mapToJson(userArgs);
+ setField("userArgs", jsonString);
+ }
+
/**
* The url callback
*/
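
getUserArgs/setUserArgs above persist the map as a JSON string in a single job-state field. A minimal round-trip sketch using the same JsonBuilder calls the diff relies on (the map contents are illustrative):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hive.hcatalog.templeton.JsonBuilder;

    public class UserArgsRoundTrip {
      public static void main(String[] args) throws IOException {
        Map<String, Object> userArgs = new HashMap<String, Object>();
        userArgs.put("user.name", "hadoopqa");   // illustrative values
        userArgs.put("enablelog", "false");
        String json = JsonBuilder.mapToJson(userArgs);          // what setUserArgs() stores
        @SuppressWarnings("unchecked")
        Map<String, Object> back =
            (Map<String, Object>) JsonBuilder.jsonToMap(json);  // what getUserArgs() returns
        System.out.println(back.get("user.name"));
      }
    }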
Modified: hive/branches/vectorization/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ivy/libraries.properties?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/ivy/libraries.properties (original)
+++ hive/branches/vectorization/ivy/libraries.properties Fri Sep 27 01:26:36 2013
@@ -43,6 +43,8 @@ commons-pool.version=1.5.4
derby.version=10.4.2.0
guava.version=11.0.2
hbase.version=0.94.6.1
+httpclient.version=4.2.5
+httpcore.version=4.2.4
jackson.version=1.8.8
javaewah.version=0.3.2
jdo-api.version=3.0.1
Modified: hive/branches/vectorization/jdbc/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/ivy.xml?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/ivy.xml (original)
+++ hive/branches/vectorization/jdbc/ivy.xml Fri Sep 27 01:26:36 2013
@@ -29,5 +29,10 @@
<dependencies>
<dependency org="org.apache.hive" name="hive-cli" rev="${version}"
conf="compile->default" />
+ <dependency org="org.apache.httpcomponents" name="httpcore"
+ rev="${httpcore.version}"/>
+ <dependency org="org.apache.httpcomponents" name="httpclient"
+ rev="${httpclient.version}"/>
+
</dependencies>
</ivy-module>
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Fri Sep 27 01:26:36 2013
@@ -44,20 +44,23 @@ import java.util.concurrent.Executor;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TCloseSessionReq;
import org.apache.hive.service.cli.thrift.TOpenSessionReq;
import org.apache.hive.service.cli.thrift.TOpenSessionResp;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.THttpClient;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
@@ -75,30 +78,49 @@ public class HiveConnection implements j
private static final String HIVE_AUTH_PASSWD = "password";
private static final String HIVE_ANONYMOUS_USER = "anonymous";
private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
-
+ private final String jdbcURI;
+ private final String host;
+ private final int port;
+ private final Map<String, String> sessConfMap;
+ private final Map<String, String> hiveConfMap;
+ private final Map<String, String> hiveVarMap;
+ private final boolean isEmbeddedMode;
private TTransport transport;
private TCLIService.Iface client;
private boolean isClosed = true;
private SQLWarning warningChain = null;
private TSessionHandle sessHandle = null;
private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
- /**
- * TODO: - parse uri (use java.net.URI?).
- */
+
public HiveConnection(String uri, Properties info) throws SQLException {
- Utils.JdbcConnectionParams connParams = Utils.parseURL(uri);
- if (connParams.isEmbeddedMode()) {
- client = new EmbeddedThriftCLIService();
+ jdbcURI = uri;
+ // parse the connection uri
+ Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
+ // extract parsed connection parameters:
+ // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+ // each list: <key1>=<val1>;<key2>=<val2> and so on
+ // sess_var_list -> sessConfMap
+ // hive_conf_list -> hiveConfMap
+ // hive_var_list -> hiveVarMap
+ host = connParams.getHost();
+ port = connParams.getPort();
+ sessConfMap = connParams.getSessionVars();
+ hiveConfMap = connParams.getHiveConfs();
+ hiveVarMap = connParams.getHiveVars();
+ isEmbeddedMode = connParams.isEmbeddedMode();
+
+ if (isEmbeddedMode) {
+ client = new EmbeddedThriftBinaryCLIService();
} else {
// extract user/password from JDBC connection properties if its not supplied in the connection URL
if (info.containsKey(HIVE_AUTH_USER)) {
- connParams.getSessionVars().put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
+ sessConfMap.put(HIVE_AUTH_USER, info.getProperty(HIVE_AUTH_USER));
if (info.containsKey(HIVE_AUTH_PASSWD)) {
- connParams.getSessionVars().put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
+ sessConfMap.put(HIVE_AUTH_PASSWD, info.getProperty(HIVE_AUTH_PASSWD));
}
}
-
- openTransport(uri, connParams.getHost(), connParams.getPort(), connParams.getSessionVars());
+ // open the client transport
+ openTransport();
}
// add supported protocols
@@ -107,48 +129,66 @@ public class HiveConnection implements j
supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
// open client session
- openSession(uri);
+ openSession();
- configureConnection(connParams);
+ configureConnection();
}
- private void configureConnection(Utils.JdbcConnectionParams connParams)
- throws SQLException {
- // set the hive variable in session state for local mode
- if (connParams.isEmbeddedMode()) {
- if (!connParams.getHiveVars().isEmpty()) {
- SessionState.get().setHiveVariables(connParams.getHiveVars());
- }
- } else {
- // for remote JDBC client, try to set the conf var using 'set foo=bar'
- Statement stmt = createStatement();
- for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
- stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
- stmt.close();
- }
+ private void openTransport() throws SQLException {
+ transport = isHttpTransportMode() ?
+ createHttpTransport() :
+ createBinaryTransport();
+ TProtocol protocol = new TBinaryProtocol(transport);
+ client = new TCLIService.Client(protocol);
+ try {
+ transport.open();
+ } catch (TTransportException e) {
+ throw new SQLException("Could not open connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+ }
+ }
- // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
- for (Entry<String, String> hiveVar : connParams.getHiveVars().entrySet()) {
- stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
- stmt.close();
- }
+ private TTransport createHttpTransport() throws SQLException {
+ // http path should begin with "/"
+ String httpPath;
+ httpPath = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname);
+ if(httpPath == null) {
+ httpPath = "/";
+ }
+ if(!httpPath.startsWith("/")) {
+ httpPath = "/" + httpPath;
}
+
+ DefaultHttpClient httpClient = new DefaultHttpClient();
+ String httpUrl = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname) +
+ "://" + host + ":" + port + httpPath;
+ httpClient.addRequestInterceptor(
+ new HttpBasicAuthInterceptor(getUserName(), getPasswd())
+ );
+ try {
+ transport = new THttpClient(httpUrl, httpClient);
+ }
+ catch (TTransportException e) {
+ String msg = "Could not create http connection to " +
+ jdbcURI + ". " + e.getMessage();
+ throw new SQLException(msg, " 08S01", e);
+ }
+ return transport;
}
- private void openTransport(String uri, String host, int port, Map<String, String> sessConf )
- throws SQLException {
+ private TTransport createBinaryTransport() throws SQLException {
transport = new TSocket(host, port);
-
// handle secure connection if specified
- if (!sessConf.containsKey(HIVE_AUTH_TYPE)
- || !sessConf.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)){
+ if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
+ || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
try {
- if (sessConf.containsKey(HIVE_AUTH_PRINCIPAL)) {
+ // If Kerberos
+ if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
Map<String, String> saslProps = new HashMap<String, String>();
SaslQOP saslQOP = SaslQOP.AUTH;
- if(sessConf.containsKey(HIVE_AUTH_QOP)) {
+ if(sessConfMap.containsKey(HIVE_AUTH_QOP)) {
try {
- saslQOP = SaslQOP.fromString(sessConf.get(HIVE_AUTH_QOP));
+ saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
} catch (IllegalArgumentException e) {
throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
}
@@ -156,35 +196,38 @@ public class HiveConnection implements j
saslProps.put(Sasl.QOP, saslQOP.toString());
saslProps.put(Sasl.SERVER_AUTH, "true");
transport = KerberosSaslHelper.getKerberosTransport(
- sessConf.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
+ sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
} else {
- String userName = sessConf.get(HIVE_AUTH_USER);
+ String userName = sessConfMap.get(HIVE_AUTH_USER);
if ((userName == null) || userName.isEmpty()) {
userName = HIVE_ANONYMOUS_USER;
}
- String passwd = sessConf.get(HIVE_AUTH_PASSWD);
+ String passwd = sessConfMap.get(HIVE_AUTH_PASSWD);
if ((passwd == null) || passwd.isEmpty()) {
passwd = HIVE_ANONYMOUS_PASSWD;
}
transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
}
} catch (SaslException e) {
- throw new SQLException("Could not establish secure connection to "
- + uri + ": " + e.getMessage(), " 08S01", e);
+ throw new SQLException("Could not create secure connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
}
}
+ return transport;
+ }
- TProtocol protocol = new TBinaryProtocol(transport);
- client = new TCLIService.Client(protocol);
- try {
- transport.open();
- } catch (TTransportException e) {
- throw new SQLException("Could not establish connection to "
- + uri + ": " + e.getMessage(), " 08S01", e);
+
+ private boolean isHttpTransportMode() {
+ String transportMode =
+ hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname);
+ if(transportMode != null && (transportMode.equalsIgnoreCase("http") ||
+ transportMode.equalsIgnoreCase("https"))) {
+ return true;
}
+ return false;
}
- private void openSession(String uri) throws SQLException {
+ private void openSession() throws SQLException {
TOpenSessionReq openReq = new TOpenSessionReq();
// set the session configuration
@@ -201,11 +244,61 @@ public class HiveConnection implements j
sessHandle = openResp.getSessionHandle();
} catch (TException e) {
throw new SQLException("Could not establish connection to "
- + uri + ": " + e.getMessage(), " 08S01", e);
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
}
isClosed = false;
}
+ private void configureConnection() throws SQLException {
+ // set the hive variable in session state for local mode
+ if (isEmbeddedMode) {
+ if (!hiveVarMap.isEmpty()) {
+ SessionState.get().setHiveVariables(hiveVarMap);
+ }
+ } else {
+ // for remote JDBC client, try to set the conf var using 'set foo=bar'
+ Statement stmt = createStatement();
+ for (Entry<String, String> hiveConf : hiveConfMap.entrySet()) {
+ stmt.execute("set " + hiveConf.getKey() + "=" + hiveConf.getValue());
+ }
+
+ // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
+ for (Entry<String, String> hiveVar : hiveVarMap.entrySet()) {
+ stmt.execute("set hivevar:" + hiveVar.getKey() + "=" + hiveVar.getValue());
+ }
+ stmt.close();
+ }
+ }
+
+ /**
+ * @return username from sessConfMap
+ */
+ private String getUserName() {
+ return getSessionValue(HIVE_AUTH_USER, HIVE_ANONYMOUS_USER);
+ }
+
+ /**
+ * @return password from sessConfMap
+ */
+ private String getPasswd() {
+ return getSessionValue(HIVE_AUTH_PASSWD, HIVE_ANONYMOUS_PASSWD);
+ }
+
+ /**
+ * Look up varName in sessConfMap; if it's null or empty, return the
+ * default value varDefault.
+ * @param varName
+ * @param varDefault
+ * @return
+ */
+ private String getSessionValue(String varName, String varDefault) {
+ String varValue = sessConfMap.get(varName);
+ if ((varValue == null) || varValue.isEmpty()) {
+ varValue = varDefault;
+ }
+ return varValue;
+ }
+
public void abort(Executor executor) throws SQLException {
// JDK 1.7
throw new SQLException("Method not supported");
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java Fri Sep 27 01:26:36 2013
@@ -30,6 +30,10 @@ import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.logging.Logger;
import java.util.regex.Pattern;
+
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
+
+
/**
* HiveDriver.
*
@@ -50,16 +54,6 @@ public class HiveDriver implements Drive
private static final boolean JDBC_COMPLIANT = false;
/**
- * The required prefix for the connection URL.
- */
- private static final String URL_PREFIX = "jdbc:hive2://";
-
- /**
- * If host is provided, without a port.
- */
- private static final String DEFAULT_PORT = "10000";
-
- /**
* Property key for the database name.
*/
private static final String DBNAME_PROPERTY_KEY = "DBNAME";
@@ -99,7 +93,7 @@ public class HiveDriver implements Drive
*/
public boolean acceptsURL(String url) throws SQLException {
- return Pattern.matches(URL_PREFIX + ".*", url);
+ return Pattern.matches(Utils.URL_PREFIX + ".*", url);
}
/*
@@ -183,8 +177,8 @@ public class HiveDriver implements Drive
info = new Properties();
}
- if ((url != null) && url.startsWith(URL_PREFIX)) {
- info = parseURL(url, info);
+ if ((url != null) && url.startsWith(Utils.URL_PREFIX)) {
+ info = parseURLforPropertyInfo(url, info);
}
DriverPropertyInfo hostProp = new DriverPropertyInfo(HOST_PROPERTY_KEY,
@@ -214,7 +208,6 @@ public class HiveDriver implements Drive
/**
* Returns whether the driver is JDBC compliant.
*/
-
public boolean jdbcCompliant() {
return JDBC_COMPLIANT;
}
@@ -223,44 +216,36 @@ public class HiveDriver implements Drive
* Takes a url in the form of jdbc:hive://[hostname]:[port]/[db_name] and
* parses it. Everything after jdbc:hive// is optional.
*
+ * The output from Utils.parseUrl() is massaged for the needs of getPropertyInfo
* @param url
* @param defaults
* @return
* @throws java.sql.SQLException
*/
- private Properties parseURL(String url, Properties defaults) throws SQLException {
+ private Properties parseURLforPropertyInfo(String url, Properties defaults) throws SQLException {
Properties urlProps = (defaults != null) ? new Properties(defaults)
: new Properties();
- if (url == null || !url.startsWith(URL_PREFIX)) {
+ if (url == null || !url.startsWith(Utils.URL_PREFIX)) {
throw new SQLException("Invalid connection url: " + url);
}
- if (url.length() <= URL_PREFIX.length()) {
- return urlProps;
- }
-
- // [hostname]:[port]/[db_name]
- String connectionInfo = url.substring(URL_PREFIX.length());
-
- // [hostname]:[port] [db_name]
- String[] hostPortAndDatabase = connectionInfo.split("/", 2);
-
- // [hostname]:[port]
- if (hostPortAndDatabase[0].length() > 0) {
- String[] hostAndPort = hostPortAndDatabase[0].split(":", 2);
- urlProps.put(HOST_PROPERTY_KEY, hostAndPort[0]);
- if (hostAndPort.length > 1) {
- urlProps.put(PORT_PROPERTY_KEY, hostAndPort[1]);
- } else {
- urlProps.put(PORT_PROPERTY_KEY, DEFAULT_PORT);
- }
- }
-
- // [db_name]
- if (hostPortAndDatabase.length > 1) {
- urlProps.put(DBNAME_PROPERTY_KEY, hostPortAndDatabase[1]);
- }
+ JdbcConnectionParams params = Utils.parseURL(url);
+ String host = params.getHost();
+ if (host == null){
+ host = "";
+ }
+ String port = Integer.toString(params.getPort());
+ if(host.equals("")){
+ port = "";
+ }
+ else if(port.equals("0")){
+ port = Utils.DEFAULT_PORT;
+ }
+ String db = params.getDbName();
+ urlProps.put(HOST_PROPERTY_KEY, host);
+ urlProps.put(PORT_PROPERTY_KEY, port);
+ urlProps.put(DBNAME_PROPERTY_KEY, db);
return urlProps;
}
Modified: hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/branches/vectorization/jdbc/src/java/org/apache/hive/jdbc/Utils.java Fri Sep 27 01:26:36 2013
@@ -169,13 +169,16 @@ public class Utils {
/**
* Parse JDBC connection URL
- * The new format of the URL is jdbc:hive://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
+ * The new format of the URL is jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
* where the optional sess, conf and var lists are semicolon separated <key>=<val> pairs. As before, if the
* host/port is not specified, the driver runs an embedded hive.
* examples -
- * jdbc:hive://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
- * jdbc:hive://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
- * jdbc:hive://ubuntu:11000/db2;user=foo;password=bar
+ * jdbc:hive2://ubuntu:11000/db2?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+ * jdbc:hive2://?hive.cli.conf.printheader=true;hive.exec.mode.local.auto.inputbytes.max=9999#stab=salesTable;icol=customerID
+ * jdbc:hive2://ubuntu:11000/db2;user=foo;password=bar
+ *
+ * Connect to http://server:10001/hs2, with the specified basic-auth credentials and initial database:
+ * jdbc:hive2://server:10001/db;user=foo;password=bar?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2
*
* Note that currently the session properties are not used.
*
@@ -189,7 +192,8 @@ public class Utils {
throw new IllegalArgumentException("Bad URL format");
}
- // Don't parse URL with no other configuration.
+ // For URLs with no other configuration,
+ // don't parse them; just set embedded mode to true
if (uri.equalsIgnoreCase(URL_PREFIX)) {
connParams.setEmbeddedMode(true);
return connParams;
@@ -197,11 +201,11 @@ public class Utils {
URI jdbcURI = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
- //Check to prevent unintentional use of embedded mode. A missing "/" can
+ // Check to prevent unintentional use of embedded mode. A missing "/"
// to separate the 'path' portion of URI can result in this.
- //The missing "/" common typo while using secure mode, eg of such url -
+ // A missing "/" is a common typo when using secure mode, e.g. in a url such as:
// jdbc:hive2://localhost:10000;principal=hive/HiveServer2Host@YOUR-REALM.COM
- if((jdbcURI.getAuthority() != null) && (jdbcURI.getHost()==null)){
+ if ((jdbcURI.getAuthority() != null) && (jdbcURI.getHost() == null)) {
throw new IllegalArgumentException("Bad URL format. Hostname not found "
+ " in authority part of the url: " + jdbcURI.getAuthority()
+ ". Are you missing a '/' after the hostname ?");
@@ -264,6 +268,4 @@ public class Utils {
return connParams;
}
-
-
}
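
As a usage sketch of the new HTTP-mode URL format (the host, port, credentials, and
http path below are the hypothetical values from the javadoc example, not a known
deployment):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HttpModeConnect {
      public static void main(String[] args) throws Exception {
        // Session vars (user/password) go before '?'; hive conf pairs go after it.
        String url = "jdbc:hive2://server:10001/db;user=foo;password=bar"
            + "?hive.server2.transport.mode=http;hive.server2.thrift.http.path=hs2";
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(url);
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("show tables");
        while (rs.next()) {
          System.out.println(rs.getString(1));
        }
        conn.close();
      }
    }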
Modified: hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/vectorization/jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java Fri Sep 27 01:26:36 2013
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hive.common.util.HiveVersionInfo;
+import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
import org.apache.hive.service.cli.operation.ClassicTableTypeMapping.ClassicTableTypes;
import org.apache.hive.service.cli.operation.HiveTableTypeMapping;
@@ -1356,14 +1357,21 @@ public class TestJdbcDriver2 extends Tes
// [url] [host] [port] [db]
private static final String[][] URL_PROPERTIES = new String[][] {
+ // binary mode
{"jdbc:hive2://", "", "", "default"},
{"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
{"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
- {"jdbc:hive2://foo:1243", "foo", "1243", "default"}};
+ {"jdbc:hive2://foo:1243", "foo", "1243", "default"},
+
+ // http mode
+ {"jdbc:hive2://server:10002/db;user=foo;password=bar?" +
+ "hive.server2.transport.mode=http;" +
+ "hive.server2.thrift.http.path=hs2",
+ "server", "10002", "db"},
+ };
public void testDriverProperties() throws SQLException {
HiveDriver driver = new HiveDriver();
-
for (String[] testValues : URL_PROPERTIES) {
DriverPropertyInfo[] dpi = driver.getPropertyInfo(testValues[0], null);
assertEquals("unexpected DriverPropertyInfo array size", 3, dpi.length);
@@ -1371,7 +1379,29 @@ public class TestJdbcDriver2 extends Tes
assertDpi(dpi[1], "PORT", testValues[2]);
assertDpi(dpi[2], "DBNAME", testValues[3]);
}
+ }
+ private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
+ {"jdbc:hive2://server:10002/db;" +
+ "user=foo;password=bar?" +
+ "hive.server2.transport.mode=http;" +
+ "hive.server2.thrift.http.path=hs2", "server", "10002", "db", "http", "hs2"},
+ {"jdbc:hive2://server:10000/testdb;" +
+ "user=foo;password=bar?" +
+ "hive.server2.transport.mode=binary;" +
+ "hive.server2.thrift.http.path=", "server", "10000", "testdb", "binary", ""},
+ };
+
+ public void testParseUrlHttpMode() throws SQLException {
+ HiveDriver driver = new HiveDriver();
+ for (String[] testValues : HTTP_URL_PROPERTIES) {
+ JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+ assertEquals(testValues[1], params.getHost());
+ assertEquals(Integer.parseInt(testValues[2]), params.getPort());
+ assertEquals(testValues[3], params.getDbName());
+ assertEquals(testValues[4], params.getHiveConfs().get("hive.server2.transport.mode"));
+ assertEquals(testValues[5], params.getHiveConfs().get("hive.server2.thrift.http.path"));
+ }
}
private static void assertDpi(DriverPropertyInfo dpi, String name,
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java Fri Sep 27 01:26:36 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.metastore
import static org.apache.commons.lang.StringUtils.join;
import static org.apache.commons.lang.StringUtils.repeat;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -30,10 +33,11 @@ import java.util.TreeMap;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
+import javax.jdo.Transaction;
+import javax.jdo.datastore.JDOConnection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
@@ -63,9 +67,87 @@ class MetaStoreDirectSql {
private static final Log LOG = LogFactory.getLog(MetaStoreDirectSql.class);
private final PersistenceManager pm;
+ /**
+ * We want to avoid db-specific code in this class and stick with ANSI SQL. However, mysql
+ * and postgres are differently ansi-incompatible (mysql by default doesn't support quoted
+ * identifiers, and postgres contravenes ANSI by coercing unquoted ones to lower case).
+ * MySQL's way of working around this is simpler (just set ansi quotes mode on), so we will
+ * use that. MySQL detection is done by actually issuing the set-ansi-quotes command.
+ */
+ private final boolean isMySql;
+
+ /**
+ * Whether direct SQL can be used with the current datastore backing {@link #pm}.
+ */
+ private final boolean isCompatibleDatastore;
+
+ // TODO: we might also want to work around the strange and arguably non-standard behavior
+ // of postgres where it rolls back a tx after a failed select (see SQL92 4.28, on page 69
+ // about implicit rollbacks; 4.10.1 last paragraph for the "spirit" of the standard).
+ // See #canUseDirectSql in ObjectStore, isActiveTransaction is undesirable but unavoidable
+ // for postgres; in MySQL and other databases we could avoid it.
public MetaStoreDirectSql(PersistenceManager pm) {
this.pm = pm;
+ Transaction tx = pm.currentTransaction();
+ tx.begin();
+ boolean isMySql = false;
+ try {
+ trySetAnsiQuotesForMysql();
+ isMySql = true;
+ } catch (SQLException sqlEx) {
+ LOG.info("MySQL check failed, assuming we are not on mysql: " + sqlEx.getMessage());
+ tx.rollback();
+ tx = pm.currentTransaction();
+ tx.begin();
+ }
+ // This should work. If it doesn't, we will self-disable. What a PITA...
+ boolean isCompatibleDatastore = false;
+ String selfTestQuery = "select \"DB_ID\" from \"DBS\"";
+ try {
+ pm.newQuery("javax.jdo.query.SQL", selfTestQuery).execute();
+ isCompatibleDatastore = true;
+ tx.commit();
+ } catch (Exception ex) {
+ LOG.error("Self-test query [" + selfTestQuery + "] failed; direct SQL is disabled", ex);
+ tx.rollback();
+ }
+
+ this.isCompatibleDatastore = isCompatibleDatastore;
+ this.isMySql = isMySql;
+ }
+
+ public boolean isCompatibleDatastore() {
+ return isCompatibleDatastore;
+ }
+
+ /**
+ * See {@link #trySetAnsiQuotesForMysql()}.
+ */
+ private void setAnsiQuotesForMysql() throws MetaException {
+ try {
+ trySetAnsiQuotesForMysql();
+ } catch (SQLException sqlEx) {
+ throw new MetaException("Error setting ansi quotes: " + sqlEx.getMessage());
+ }
+ }
+
+ /**
+ * MySQL, by default, doesn't recognize the ANSI quoted identifiers that we need for Postgres.
+ * Try to set the ANSI quotes mode on for the session. Due to connection pooling, this needs
+ * to be called in the same transaction as the actual queries.
+ */
+ private void trySetAnsiQuotesForMysql() throws SQLException {
+ final String queryText = "SET @@session.sql_mode=ANSI_QUOTES";
+ JDOConnection jdoConn = pm.getDataStoreConnection();
+ boolean doTrace = LOG.isDebugEnabled();
+ try {
+ long start = doTrace ? System.nanoTime() : 0;
+ ((Connection)jdoConn.getNativeConnection()).createStatement().execute(queryText);
+ timingTrace(doTrace, queryText, start, doTrace ? System.nanoTime() : 0);
+ } finally {
+ jdoConn.close(); // We must release the connection before we call other pm methods.
+ }
}
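
To see why the session-level switch matters, a standalone sketch against a plain MySQL
connection (the JDBC URL and credentials are placeholders; the quoted query is the same
self-test query used above):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class AnsiQuotesDemo {
      public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection(
            "jdbc:mysql://localhost:3306/metastore", "hiveuser", "hivepass");
        Statement stmt = conn.createStatement();
        // With MySQL's default sql_mode, "DBS" parses as a string literal rather
        // than an identifier, so the portable quoted form below would fail.
        stmt.execute("SET @@session.sql_mode=ANSI_QUOTES");
        // Now ANSI-quoted identifiers are legal on MySQL as well as Postgres.
        stmt.executeQuery("select \"DB_ID\" from \"DBS\"");
        conn.close();
      }
    }

Because the setting is per-session and metastore connections are pooled, the production
path above has to issue the same command inside the transaction that runs the actual
queries.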
/**
@@ -83,7 +165,8 @@ class MetaStoreDirectSql {
}
String list = repeat(",?", partNames.size()).substring(1);
return getPartitionsViaSqlFilterInternal(dbName, tblName, null,
- "and PARTITIONS.PART_NAME in (" + list + ")", partNames, new ArrayList<String>(), max);
+ "and \"PARTITIONS\".\"PART_NAME\" in (" + list + ")",
+ partNames, new ArrayList<String>(), max);
}
/**
@@ -124,9 +207,9 @@ class MetaStoreDirectSql {
}
private boolean isViewTable(String dbName, String tblName) throws MetaException {
- String queryText = "select TBL_TYPE from TBLS" +
- " inner join DBS on TBLS.DB_ID = DBS.DB_ID " +
- " where TBLS.TBL_NAME = ? and DBS.NAME = ?";
+ String queryText = "select \"TBL_TYPE\" from \"TBLS\"" +
+ " inner join \"DBS\" on \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" " +
+ " where \"TBLS\".\"TBL_NAME\" = ? and \"DBS\".\"NAME\" = ?";
Object[] params = new Object[] { tblName, dbName };
Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
query.setUnique(true);
@@ -155,7 +238,11 @@ class MetaStoreDirectSql {
dbName = dbName.toLowerCase();
tblName = tblName.toLowerCase();
// We have to be mindful of order during filtering if we are not returning all partitions.
- String orderForFilter = (max != null) ? " order by PART_NAME asc" : "";
+ String orderForFilter = (max != null) ? " order by \"PART_NAME\" asc" : "";
+ if (isMySql) {
+ assert pm.currentTransaction().isActive();
+ setAnsiQuotesForMysql(); // must be inside tx together with queries
+ }
// Get all simple fields for partitions and related objects, which we can map one-on-one.
// We will do this in 2 queries to use different existing indices for each one.
@@ -163,13 +250,13 @@ class MetaStoreDirectSql {
// TODO: We might want to tune the indexes instead. With current ones MySQL performs
// poorly, esp. with 'order by' w/o index on large tables, even if the number of actual
// results is small (query that returns 8 out of 32k partitions can go 4sec. to 0sec. by
- // just adding a PART_ID IN (...) filter that doesn't alter the results to it, probably
+ // just adding a \"PART_ID\" IN (...) filter that doesn't alter the results to it, probably
// causing it to not sort the entire table due to not knowing how selective the filter is.
String queryText =
- "select PARTITIONS.PART_ID from PARTITIONS"
- + " inner join TBLS on PARTITIONS.TBL_ID = TBLS.TBL_ID "
- + " inner join DBS on TBLS.DB_ID = DBS.DB_ID "
- + join(joinsForFilter, ' ') + " where TBLS.TBL_NAME = ? and DBS.NAME = ? "
+ "select \"PARTITIONS\".\"PART_ID\" from \"PARTITIONS\""
+ + " inner join \"TBLS\" on \"PARTITIONS\".\"TBL_ID\" = \"TBLS\".\"TBL_ID\" "
+ + " inner join \"DBS\" on \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
+ + join(joinsForFilter, ' ') + " where \"TBLS\".\"TBL_NAME\" = ? and \"DBS\".\"NAME\" = ? "
+ (sqlFilter == null ? "" : sqlFilter) + orderForFilter;
Object[] params = new Object[paramsForFilter.size() + 2];
params[0] = tblName;
@@ -203,14 +290,15 @@ class MetaStoreDirectSql {
// Now get most of the other fields.
queryText =
- "select PARTITIONS.PART_ID, SDS.SD_ID, SDS.CD_ID, SERDES.SERDE_ID, "
- + " PARTITIONS.CREATE_TIME, PARTITIONS.LAST_ACCESS_TIME, SDS.INPUT_FORMAT, "
- + " SDS.IS_COMPRESSED, SDS.IS_STOREDASSUBDIRECTORIES, SDS.LOCATION, SDS.NUM_BUCKETS, "
- + " SDS.OUTPUT_FORMAT, SERDES.NAME, SERDES.SLIB "
- + "from PARTITIONS"
- + " left outer join SDS on PARTITIONS.SD_ID = SDS.SD_ID "
- + " left outer join SERDES on SDS.SERDE_ID = SERDES.SERDE_ID "
- + "where PART_ID in (" + partIds + ") order by PART_NAME asc";
+ "select \"PARTITIONS\".\"PART_ID\", \"SDS\".\"SD_ID\", \"SDS\".\"CD_ID\","
+ + " \"SERDES\".\"SERDE_ID\", \"PARTITIONS\".\"CREATE_TIME\","
+ + " \"PARTITIONS\".\"LAST_ACCESS_TIME\", \"SDS\".\"INPUT_FORMAT\", \"SDS\".\"IS_COMPRESSED\","
+ + " \"SDS\".\"IS_STOREDASSUBDIRECTORIES\", \"SDS\".\"LOCATION\", \"SDS\".\"NUM_BUCKETS\","
+ + " \"SDS\".\"OUTPUT_FORMAT\", \"SERDES\".\"NAME\", \"SERDES\".\"SLIB\" "
+ + "from \"PARTITIONS\""
+ + " left outer join \"SDS\" on \"PARTITIONS\".\"SD_ID\" = \"SDS\".\"SD_ID\" "
+ + " left outer join \"SERDES\" on \"SDS\".\"SERDE_ID\" = \"SERDES\".\"SERDE_ID\" "
+ + "where \"PART_ID\" in (" + partIds + ") order by \"PART_NAME\" asc";
start = doTrace ? System.nanoTime() : 0;
query = pm.newQuery("javax.jdo.query.SQL", queryText);
@SuppressWarnings("unchecked")
@@ -254,8 +342,8 @@ class MetaStoreDirectSql {
part.setValues(new ArrayList<String>());
part.setDbName(dbName);
part.setTableName(tblName);
- if (fields[4] != null) part.setCreateTime((Integer)fields[4]);
- if (fields[5] != null) part.setLastAccessTime((Integer)fields[5]);
+ if (fields[4] != null) part.setCreateTime(extractSqlInt(fields[4]));
+ if (fields[5] != null) part.setLastAccessTime(extractSqlInt(fields[5]));
partitions.put(partitionId, part);
if (sdId == null) continue; // Probably a view.
@@ -279,7 +367,7 @@ class MetaStoreDirectSql {
tmpBoolean = extractSqlBoolean(fields[8]);
if (tmpBoolean != null) sd.setStoredAsSubDirectories(tmpBoolean);
sd.setLocation((String)fields[9]);
- if (fields[10] != null) sd.setNumBuckets((Integer)fields[10]);
+ if (fields[10] != null) sd.setNumBuckets(extractSqlInt(fields[10]));
sd.setOutputFormat((String)fields[11]);
sdSb.append(sdId).append(",");
part.setSd(sd);
@@ -309,15 +397,17 @@ class MetaStoreDirectSql {
timingTrace(doTrace, queryText, start, queryTime);
// Now get all the one-to-many things. Start with partitions.
- queryText = "select PART_ID, PARAM_KEY, PARAM_VALUE from PARTITION_PARAMS where PART_ID in ("
- + partIds + ") and PARAM_KEY is not null order by PART_ID asc";
+ queryText = "select \"PART_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from \"PARTITION_PARAMS\""
+ + " where \"PART_ID\" in (" + partIds + ") and \"PARAM_KEY\" is not null"
+ + " order by \"PART_ID\" asc";
loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
public void apply(Partition t, Object[] fields) {
t.putToParameters((String)fields[1], (String)fields[2]);
}});
- queryText = "select PART_ID, PART_KEY_VAL from PARTITION_KEY_VALS where PART_ID in ("
- + partIds + ") and INTEGER_IDX >= 0 order by PART_ID asc, INTEGER_IDX asc";
+ queryText = "select \"PART_ID\", \"PART_KEY_VAL\" from \"PARTITION_KEY_VALS\""
+ + " where \"PART_ID\" in (" + partIds + ") and \"INTEGER_IDX\" >= 0"
+ + " order by \"PART_ID\" asc, \"INTEGER_IDX\" asc";
loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
public void apply(Partition t, Object[] fields) {
t.addToValues((String)fields[1]);
@@ -332,33 +422,35 @@ class MetaStoreDirectSql {
colIds = trimCommaList(colsSb);
// Get all the stuff for SD. Don't do empty-list check - we expect partitions do have SDs.
- queryText = "select SD_ID, PARAM_KEY, PARAM_VALUE from SD_PARAMS where SD_ID in ("
- + sdIds + ") and PARAM_KEY is not null order by SD_ID asc";
+ queryText = "select \"SD_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from \"SD_PARAMS\""
+ + " where \"SD_ID\" in (" + sdIds + ") and \"PARAM_KEY\" is not null"
+ + " order by \"SD_ID\" asc";
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
public void apply(StorageDescriptor t, Object[] fields) {
t.putToParameters((String)fields[1], (String)fields[2]);
}});
- // Note that SORT_COLS has "ORDER" column, which is not SQL92-legal. We have two choices
- // here - drop SQL92, or get '*' and be broken on certain schema changes. We do the latter.
- queryText = "select SD_ID, COLUMN_NAME, SORT_COLS.* from SORT_COLS where SD_ID in ("
- + sdIds + ") and INTEGER_IDX >= 0 order by SD_ID asc, INTEGER_IDX asc";
+ queryText = "select \"SD_ID\", \"COLUMN_NAME\", \"SORT_COLS\".\"ORDER\" from \"SORT_COLS\""
+ + " where \"SD_ID\" in (" + sdIds + ") and \"INTEGER_IDX\" >= 0"
+ + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
public void apply(StorageDescriptor t, Object[] fields) {
- if (fields[4] == null) return;
- t.addToSortCols(new Order((String)fields[1], (Integer)fields[4]));
+ if (fields[2] == null) return;
+ t.addToSortCols(new Order((String)fields[1], extractSqlInt(fields[2])));
}});
- queryText = "select SD_ID, BUCKET_COL_NAME from BUCKETING_COLS where SD_ID in ("
- + sdIds + ") and INTEGER_IDX >= 0 order by SD_ID asc, INTEGER_IDX asc";
+ queryText = "select \"SD_ID\", \"BUCKET_COL_NAME\" from \"BUCKETING_COLS\""
+ + " where \"SD_ID\" in (" + sdIds + ") and \"INTEGER_IDX\" >= 0"
+ + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
public void apply(StorageDescriptor t, Object[] fields) {
t.addToBucketCols((String)fields[1]);
}});
// Skewed columns stuff.
- queryText = "select SD_ID, SKEWED_COL_NAME from SKEWED_COL_NAMES where SD_ID in ("
- + sdIds + ") and INTEGER_IDX >= 0 order by SD_ID asc, INTEGER_IDX asc";
+ queryText = "select \"SD_ID\", \"SKEWED_COL_NAME\" from \"SKEWED_COL_NAMES\""
+ + " where \"SD_ID\" in (" + sdIds + ") and \"INTEGER_IDX\" >= 0"
+ + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
boolean hasSkewedColumns =
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
public void apply(StorageDescriptor t, Object[] fields) {
@@ -370,16 +462,17 @@ class MetaStoreDirectSql {
if (hasSkewedColumns) {
// We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
queryText =
- "select SKEWED_VALUES.SD_ID_OID, SKEWED_STRING_LIST_VALUES.STRING_LIST_ID, "
- + " SKEWED_STRING_LIST_VALUES.STRING_LIST_VALUE "
- + "from SKEWED_VALUES "
- + " left outer join SKEWED_STRING_LIST_VALUES on "
- + " SKEWED_VALUES.STRING_LIST_ID_EID = SKEWED_STRING_LIST_VALUES.STRING_LIST_ID "
- + "where SKEWED_VALUES.SD_ID_OID in (" + sdIds + ") "
- + " and SKEWED_VALUES.STRING_LIST_ID_EID is not null "
- + " and SKEWED_VALUES.INTEGER_IDX >= 0 "
- + "order by SKEWED_VALUES.SD_ID_OID asc, SKEWED_VALUES.INTEGER_IDX asc, "
- + " SKEWED_STRING_LIST_VALUES.INTEGER_IDX asc";
+ "select \"SKEWED_VALUES\".\"SD_ID_OID\","
+ + " \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_ID\","
+ + " \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_VALUE\" "
+ + "from \"SKEWED_VALUES\" "
+ + " left outer join \"SKEWED_STRING_LIST_VALUES\" on \"SKEWED_VALUES\"."
+ + "\"STRING_LIST_ID_EID\" = \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_ID\" "
+ + "where \"SKEWED_VALUES\".\"SD_ID_OID\" in (" + sdIds + ") "
+ + " and \"SKEWED_VALUES\".\"STRING_LIST_ID_EID\" is not null "
+ + " and \"SKEWED_VALUES\".\"INTEGER_IDX\" >= 0 "
+ + "order by \"SKEWED_VALUES\".\"SD_ID_OID\" asc, \"SKEWED_VALUES\".\"INTEGER_IDX\" asc,"
+ + " \"SKEWED_STRING_LIST_VALUES\".\"INTEGER_IDX\" asc";
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
private Long currentListId;
private List<String> currentList;
@@ -404,16 +497,18 @@ class MetaStoreDirectSql {
// We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
queryText =
- "select SKEWED_COL_VALUE_LOC_MAP.SD_ID, SKEWED_STRING_LIST_VALUES.STRING_LIST_ID,"
- + " SKEWED_COL_VALUE_LOC_MAP.LOCATION, SKEWED_STRING_LIST_VALUES.STRING_LIST_VALUE "
- + "from SKEWED_COL_VALUE_LOC_MAP"
- + " left outer join SKEWED_STRING_LIST_VALUES on SKEWED_COL_VALUE_LOC_MAP."
- + "STRING_LIST_ID_KID = SKEWED_STRING_LIST_VALUES.STRING_LIST_ID "
- + "where SKEWED_COL_VALUE_LOC_MAP.SD_ID in (" + sdIds + ")"
- + " and SKEWED_COL_VALUE_LOC_MAP.STRING_LIST_ID_KID is not null "
- + "order by SKEWED_COL_VALUE_LOC_MAP.SD_ID asc,"
- + " SKEWED_STRING_LIST_VALUES.STRING_LIST_ID asc,"
- + " SKEWED_STRING_LIST_VALUES.INTEGER_IDX asc";
+ "select \"SKEWED_COL_VALUE_LOC_MAP\".\"SD_ID\","
+ + " \"SKEWED_STRING_LIST_VALUES\".STRING_LIST_ID,"
+ + " \"SKEWED_COL_VALUE_LOC_MAP\".\"LOCATION\","
+ + " \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_VALUE\" "
+ + "from \"SKEWED_COL_VALUE_LOC_MAP\""
+ + " left outer join \"SKEWED_STRING_LIST_VALUES\" on \"SKEWED_COL_VALUE_LOC_MAP\"."
+ + "\"STRING_LIST_ID_KID\" = \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_ID\" "
+ + "where \"SKEWED_COL_VALUE_LOC_MAP\".\"SD_ID\" in (" + sdIds + ")"
+ + " and \"SKEWED_COL_VALUE_LOC_MAP\".\"STRING_LIST_ID_KID\" is not null "
+ + "order by \"SKEWED_COL_VALUE_LOC_MAP\".\"SD_ID\" asc,"
+ + " \"SKEWED_STRING_LIST_VALUES\".\"STRING_LIST_ID\" asc,"
+ + " \"SKEWED_STRING_LIST_VALUES\".\"INTEGER_IDX\" asc";
loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
private Long currentListId;
@@ -447,8 +542,9 @@ class MetaStoreDirectSql {
// Get FieldSchema stuff if any.
if (!colss.isEmpty()) {
// We are skipping the CDS table here, as it seems to be totally useless.
- queryText = "select CD_ID, COMMENT, COLUMN_NAME, TYPE_NAME from COLUMNS_V2 where CD_ID in ("
- + colIds + ") and INTEGER_IDX >= 0 order by CD_ID asc, INTEGER_IDX asc";
+ queryText = "select \"CD_ID\", \"COMMENT\", \"COLUMN_NAME\", \"TYPE_NAME\""
+ + " from \"COLUMNS_V2\" where \"CD_ID\" in (" + colIds + ") and \"INTEGER_IDX\" >= 0"
+ + " order by \"CD_ID\" asc, \"INTEGER_IDX\" asc";
loopJoinOrderedResult(colss, queryText, 0, new ApplyFunc<List<FieldSchema>>() {
public void apply(List<FieldSchema> t, Object[] fields) {
t.add(new FieldSchema((String)fields[2], (String)fields[3], (String)fields[1]));
@@ -456,8 +552,9 @@ class MetaStoreDirectSql {
}
// Finally, get all the stuff for serdes - just the params.
- queryText = "select SERDE_ID, PARAM_KEY, PARAM_VALUE from SERDE_PARAMS where SERDE_ID in ("
- + serdeIds + ") and PARAM_KEY is not null order by SERDE_ID asc";
+ queryText = "select \"SERDE_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from \"SERDE_PARAMS\""
+ + " where \"SERDE_ID\" in (" + serdeIds + ") and \"PARAM_KEY\" is not null"
+ + " order by \"SERDE_ID\" asc";
loopJoinOrderedResult(serdes, queryText, 0, new ApplyFunc<SerDeInfo>() {
public void apply(SerDeInfo t, Object[] fields) {
t.putToParameters((String)fields[1], (String)fields[2]);
@@ -469,7 +566,7 @@ class MetaStoreDirectSql {
private void timingTrace(boolean doTrace, String queryText, long start, long queryTime) {
if (!doTrace) return;
LOG.debug("Direct SQL query in " + (queryTime - start) / 1000000.0 + "ms + " +
- (System.nanoTime() - queryTime) / 1000000.0 + "ms, the query is [ " + queryText + "]");
+ (System.nanoTime() - queryTime) / 1000000.0 + "ms, the query is [" + queryText + "]");
}
private static Boolean extractSqlBoolean(Object value) throws MetaException {
@@ -486,6 +583,10 @@ class MetaStoreDirectSql {
throw new MetaException("Cannot extrace boolean from column value " + value);
}
+ private int extractSqlInt(Object field) {
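+ // The backing datastore may hand back Integer, Long, or BigDecimal here; Number.intValue() normalizes.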
+ return ((Number)field).intValue();
+ }
+
private static String trimCommaList(StringBuilder sb) {
if (sb.length() > 0) {
sb.setLength(sb.length() - 1);
@@ -632,12 +733,12 @@ class MetaStoreDirectSql {
}
}
if (joins.get(partColIndex) == null) {
- joins.set(partColIndex, "inner join PARTITION_KEY_VALS as FILTER" + partColIndex
- + " on FILTER" + partColIndex + ".PART_ID = PARTITIONS.PART_ID and FILTER"
- + partColIndex + ".INTEGER_IDX = " + partColIndex);
+ joins.set(partColIndex, "inner join \"PARTITION_KEY_VALS\" as \"FILTER" + partColIndex
+ + "\" on \"FILTER" + partColIndex + "\".\"PART_ID\" = \"PARTITIONS\".\"PART_ID\""
+ + " and \"FILTER" + partColIndex + "\".\"INTEGER_IDX\" = " + partColIndex);
}
- String tableValue = "FILTER" + partColIndex + ".PART_KEY_VAL";
+ String tableValue = "\"FILTER" + partColIndex + "\".\"PART_KEY_VAL\"";
// TODO: need casts here if #doesOperatorSupportIntegral is amended to include lt/gt/etc.
filterBuffer.append(node.isReverseOrder
? "(? " + node.operator.getSqlOp() + " " + tableValue + ")"
Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Fri Sep 27 01:26:36 2013
@@ -2115,6 +2115,7 @@ public class ObjectStore implements RawS
// TODO: Drop table can be very slow on large tables, we might want to address this.
return allowSql
&& HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL)
+ && directSql.isCompatibleDatastore()
&& !isActiveTransaction();
}
Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java?rev=1526766&r1=1526765&r2=1526766&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java Fri Sep 27 01:26:36 2013
@@ -134,7 +134,8 @@ public class VectorizedRowBatchCtx {
(part.getPartSpec() == null || part.getPartSpec().isEmpty()) ?
part.getTableDesc().getProperties() : part.getProperties();
- Deserializer partDeserializer = part.getDeserializer();
+ Class<?> serdeClass = hiveConf.getClassByName(part.getSerdeClassName());
+ Deserializer partDeserializer = (Deserializer) serdeClass.newInstance();
partDeserializer.initialize(hiveConf, partProps);
StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer
.getObjectInspector();
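
The two added lines instantiate a fresh deserializer from the partition's serde class
name instead of reusing part.getDeserializer(). A self-contained sketch of the same
reflective pattern (the serde class name and empty properties are hypothetical
stand-ins for part.getSerdeClassName() and the real partition properties):

    import java.util.Properties;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.serde2.Deserializer;

    public class SerdeByNameDemo {
      public static void main(String[] args) throws Exception {
        HiveConf hiveConf = new HiveConf();
        Class<?> serdeClass = hiveConf.getClassByName(
            "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
        Deserializer deserializer = (Deserializer) serdeClass.newInstance();
        // In the patch, the properties come from the table or partition descriptor.
        deserializer.initialize(hiveConf, new Properties());
        System.out.println(deserializer.getObjectInspector().getTypeName());
      }
    }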