You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jc...@apache.org on 2017/01/19 01:11:27 UTC
[1/3] hive git commit: HIVE-15612: Include Calcite dependency in Druid storage handler jar (Jesus Camacho Rodriguez, reviewed by Slim Bouguerra, Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master 8cdef2bc8 -> cc3ce1614
HIVE-15612: Include Calcite dependency in Druid storage handler jar (Jesus Camacho Rodriguez, reviewed by Slim Bouguerra, Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fd7b1016
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fd7b1016
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fd7b1016
Branch: refs/heads/master
Commit: fd7b10165ee8786d851c7f72b18d3afdbceb714a
Parents: 8cdef2b
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Thu Jan 19 01:07:03 2017 +0000
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Thu Jan 19 01:09:00 2017 +0000
----------------------------------------------------------------------
druid-handler/pom.xml | 5 +++++
1 file changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/fd7b1016/druid-handler/pom.xml
----------------------------------------------------------------------
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index f691a2c..b057fff 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -250,6 +250,10 @@
<shadedPattern>org.apache.hive.druid.io.netty</shadedPattern>
</relocation>
<relocation>
+ <pattern>org.apache.calcite</pattern>
+ <shadedPattern>org.apache.hive.druid.org.apache.calcite</shadedPattern>
+ </relocation>
+ <relocation>
<pattern>org.jboss.netty</pattern>
<shadedPattern>org.apache.hive.druid.org.jboss.netty</shadedPattern>
</relocation>
@@ -268,6 +272,7 @@
<include>io.druid.extensions:*</include>
<include>com.metamx:*</include>
<include>io.netty:*</include>
+ <include>org.apache.calcite:*</include>
<include>com.fasterxml.jackson.core:*</include>
<include>com.fasterxml.jackson.datatype:*</include>
<include>com.fasterxml.jackson.dataformat:*</include>
[2/3] hive git commit: HIVE-15614: Druid splitSelectQuery closes lifecycle object too early (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Posted by jc...@apache.org.
HIVE-15614: Druid splitSelectQuery closes lifecycle object too early (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b5763019
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b5763019
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b5763019
Branch: refs/heads/master
Commit: b5763019af557d7bf553cba66de0f12de130dc7b
Parents: fd7b101
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Thu Jan 19 01:09:48 2017 +0000
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Thu Jan 19 01:09:48 2017 +0000
----------------------------------------------------------------------
.../druid/io/DruidQueryBasedInputFormat.java | 24 +++++++++++++++++---
.../druid/serde/DruidQueryRecordReader.java | 17 ++++++++++----
2 files changed, 33 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/b5763019/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
index e539dab..d1b2a72 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
@@ -188,7 +188,7 @@ public class DruidQueryBasedInputFormat extends InputFormat<NullWritable, DruidW
metadataBuilder.merge(true);
metadataBuilder.analysisTypes();
SegmentMetadataQuery metadataQuery = metadataBuilder.build();
- final Lifecycle lifecycle = new Lifecycle();
+ Lifecycle lifecycle = new Lifecycle();
HttpClient client = HttpClientInit.createClient(
HttpClientConfig.builder().withNumConnections(numConnection)
.withReadTimeout(readTimeout.toStandardDuration()).build(), lifecycle);
@@ -203,9 +203,8 @@ public class DruidQueryBasedInputFormat extends InputFormat<NullWritable, DruidW
DruidStorageHandlerUtils.createRequest(address, metadataQuery)
);
} catch (Exception e) {
- throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
- } finally {
lifecycle.stop();
+ throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
}
// Retrieve results
@@ -218,6 +217,8 @@ public class DruidQueryBasedInputFormat extends InputFormat<NullWritable, DruidW
} catch (Exception e) {
response.close();
throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
+ } finally {
+ lifecycle.stop();
}
if (metadataList == null) {
throw new IOException("Connected to Druid but could not retrieve datasource information");
@@ -253,11 +254,26 @@ public class DruidQueryBasedInputFormat extends InputFormat<NullWritable, DruidW
timeBuilder.dataSource(query.getDataSource());
TimeBoundaryQuery timeQuery = timeBuilder.build();
+ lifecycle = new Lifecycle();
+ client = HttpClientInit.createClient(
+ HttpClientConfig.builder().withNumConnections(numConnection)
+ .withReadTimeout(readTimeout.toStandardDuration()).build(), lifecycle);
+ try {
+ lifecycle.start();
+ } catch (Exception e) {
+ LOG.error("Lifecycle start issue", e);
+ }
+ try {
+ lifecycle.start();
+ } catch (Exception e) {
+ LOG.error("Lifecycle start issue", e);
+ }
try {
response = DruidStorageHandlerUtils.submitRequest(client,
DruidStorageHandlerUtils.createRequest(address, timeQuery)
);
} catch (Exception e) {
+ lifecycle.stop();
throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
}
@@ -271,6 +287,8 @@ public class DruidQueryBasedInputFormat extends InputFormat<NullWritable, DruidW
} catch (Exception e) {
response.close();
throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
+ } finally {
+ lifecycle.stop();
}
if (timeList == null || timeList.isEmpty()) {
throw new IOException(
http://git-wip-us.apache.org/repos/asf/hive/blob/b5763019/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
index dc9d6a0..0d5f0b1 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidQueryRecordReader.java
@@ -90,16 +90,21 @@ public abstract class DruidQueryRecordReader<T extends BaseQuery<R>, R extends C
HttpClient client = HttpClientInit.createClient(
HttpClientConfig.builder().withReadTimeout(readTimeout.toStandardDuration())
.withNumConnections(numConnection).build(), lifecycle);
-
try {
lifecycle.start();
} catch (Exception e) {
LOG.error("Issues with lifecycle start", e);
}
- InputStream response = DruidStorageHandlerUtils.submitRequest(client,
- DruidStorageHandlerUtils.createRequest(hiveDruidSplit.getAddress(), query)
- );
- lifecycle.stop();
+ InputStream response;
+ try {
+ response = DruidStorageHandlerUtils.submitRequest(client,
+ DruidStorageHandlerUtils.createRequest(hiveDruidSplit.getAddress(), query)
+ );
+ } catch (Exception e) {
+ lifecycle.stop();
+ throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
+ }
+
// Retrieve results
List<R> resultsList;
try {
@@ -107,6 +112,8 @@ public abstract class DruidQueryRecordReader<T extends BaseQuery<R>, R extends C
} catch (IOException e) {
response.close();
throw e;
+ } finally {
+ lifecycle.stop();
}
if (resultsList == null || resultsList.isEmpty()) {
return;
[3/3] hive git commit: HIVE-15613: Include druid-handler sources in src packaging (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Posted by jc...@apache.org.
HIVE-15613: Include druid-handler sources in src packaging (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cc3ce161
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cc3ce161
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cc3ce161
Branch: refs/heads/master
Commit: cc3ce16143eedc8ca6f0965f8ecc5d0e50b84983
Parents: b576301
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Thu Jan 19 01:11:12 2017 +0000
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Thu Jan 19 01:11:12 2017 +0000
----------------------------------------------------------------------
packaging/src/main/assembly/src.xml | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/cc3ce161/packaging/src/main/assembly/src.xml
----------------------------------------------------------------------
diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml
index f279112..a204033 100644
--- a/packaging/src/main/assembly/src.xml
+++ b/packaging/src/main/assembly/src.xml
@@ -68,6 +68,7 @@
<include>data/**/*</include>
<include>dev-support/**/*</include>
<include>docs/**/*</include>
+ <include>druid-handler/**/*</include>
<include>find-bugs/**/*</include>
<include>hbase-handler/**/*</include>
<include>hcatalog/**/*</include>