Posted to commits@hive.apache.org by om...@apache.org on 2017/05/31 16:49:53 UTC
[1/8] hive git commit: Incorporate fixes from storage-api 2.3.1.
Repository: hive
Updated Branches:
refs/heads/branch-2.2 fd1188a6a -> 61867c723
Incorporate fixes from storage-api 2.3.1.
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/61867c72
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/61867c72
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/61867c72
Branch: refs/heads/branch-2.2
Commit: 61867c723ab35d9e3e9cf6a2e3f1dc220fa78dd8
Parents: cf17541
Author: Owen O'Malley <om...@apache.org>
Authored: Fri May 26 08:52:26 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
storage-api/pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/61867c72/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index b96714e..e091e0e 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -25,7 +25,7 @@
<groupId>org.apache.hive</groupId>
<artifactId>hive-storage-api</artifactId>
- <version>2.3.0-SNAPSHOT</version>
+ <version>2.3.1</version>
<packaging>jar</packaging>
<name>Hive Storage API</name>
[2/8] hive git commit: HIVE-14362: Support explain analyze in Hive (addendum)
Posted by om...@apache.org.
HIVE-14362: Support explain analyze in Hive (addendum)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3f82447a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3f82447a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3f82447a
Branch: refs/heads/branch-2.2
Commit: 3f82447acc4939d0bbedcd14276b1f40a261b659
Parents: a046198
Author: Owen O'Malley <om...@apache.org>
Authored: Thu May 25 13:39:08 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 12 ++++++------
.../hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java | 2 ++
2 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/3f82447a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 08bd040..c67ea86 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -396,12 +396,12 @@ public class Driver implements CommandProcessor {
}
if (ctx != null && ctx.getExplainAnalyze() != AnalyzeState.RUNNING) {
- close();
+ closeInProcess(false);
}
if (isInterrupted()) {
return handleInterruption("at beginning of compilation."); //indicate if need clean resource
}
-
+
if (resetTaskIds) {
TaskFactory.resetId();
}
@@ -444,7 +444,7 @@ public class Driver implements CommandProcessor {
if (ctx == null) {
ctx = new Context(conf);
}
-
+
ctx.setTryCount(getTryCount());
ctx.setCmd(command);
ctx.setHDFSCleanup(true);
@@ -687,7 +687,7 @@ public class Driver implements CommandProcessor {
}
// The following union operation returns a union, which traverses over the
- // first set once and then then over each element of second set, in order,
+ // first set once and then then over each element of second set, in order,
// that is not contained in first. This means it doesn't replace anything
// in first set, and would preserve the WriteType in WriteEntity in first
// set in case of outputs list.
@@ -1009,7 +1009,7 @@ public class Driver implements CommandProcessor {
conf.set(ValidTxnList.VALID_TXNS_KEY, txnStr);
if(plan.getFetchTask() != null) {
/**
- * This is needed for {@link HiveConf.ConfVars.HIVEFETCHTASKCONVERSION} optimization which
+ * This is needed for {@link HiveConf.ConfVars.HIVEFETCHTASKCONVERSION} optimization which
* initializes JobConf in FetchOperator before recordValidTxns() but this has to be done
* after locks are acquired to avoid race conditions in ACID.
*/
@@ -2348,7 +2348,7 @@ public class Driver implements CommandProcessor {
this.operationId = opId;
}
- /**
+ /**
* Resets QueryState to get new queryId on Driver reuse.
*/
public void resetQueryState() {
http://git-wip-us.apache.org/repos/asf/hive/blob/3f82447a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index a573808..bd566e9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -268,8 +268,10 @@ public class TestUpdateDeleteSemanticAnalyzer {
// connection, which is conveniently created by the semantic analyzer.
Map<String, String> params = new HashMap<String, String>(1);
params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
+ db.dropTable("T");
db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
+ db.dropTable("U");
db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
Table u = db.getTable("U");
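The test change above adopts a drop-before-create pattern so that a table left behind by an earlier run cannot fail the setup. A minimal self-contained sketch of that pattern, with the metastore client stubbed behind an interface (the method names are assumed from the diff, not verified Hive signatures):

  import java.util.Arrays;
  import java.util.List;

  public class IdempotentTableSetup {
    // Stand-in for the Hive metastore client used by the test; only the two
    // calls the diff relies on are modeled here.
    interface MetaStore {
      void dropTable(String name);                      // assumed to be a no-op if the table is absent
      void createTable(String name, List<String> cols);
    }

    static void ensureFreshTable(MetaStore db, String name, List<String> cols) {
      db.dropTable(name);           // clear any copy left over from a previous run
      db.createTable(name, cols);
    }

    public static void main(String[] args) {
      MetaStore db = new MetaStore() {
        public void dropTable(String name) { System.out.println("drop " + name); }
        public void createTable(String name, List<String> cols) {
          System.out.println("create " + name + " " + cols);
        }
      };
      ensureFreshTable(db, "T", Arrays.asList("a", "b"));
    }
  }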
[4/8] hive git commit: HIVE-16402 : Upgrade to Hadoop 2.8.0 (Sahil Takiar via Ashutosh Chauhan)
Posted by om...@apache.org.
HIVE-16402 : Upgrade to Hadoop 2.8.0 (Sahil Takiar via Ashutosh Chauhan)
Signed-off-by: Ashutosh Chauhan <ha...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4ca90e01
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4ca90e01
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4ca90e01
Branch: refs/heads/branch-2.2
Commit: 4ca90e01de79fbb7e772ecbe296748f78f761025
Parents: 8e09edc
Author: Sahil Takiar <ta...@gmail.com>
Authored: Sun Apr 9 18:25:27 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
hcatalog/core/pom.xml | 10 ++-
llap-server/pom.xml | 8 ++-
metastore/pom.xml | 16 +++++
pom.xml | 65 ++++++++++++++++++--
.../encrypted/encryption_move_tbl.q.out | 2 +-
shims/0.23/pom.xml | 4 ++
shims/scheduler/pom.xml | 6 ++
storage-api/pom.xml | 2 +-
8 files changed, 103 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/hcatalog/core/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/core/pom.xml b/hcatalog/core/pom.xml
index 506bf22..92475b4 100644
--- a/hcatalog/core/pom.xml
+++ b/hcatalog/core/pom.xml
@@ -191,7 +191,7 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -208,13 +208,19 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-tests</artifactId>
<version>${hadoop.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.pig</groupId>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/llap-server/pom.xml
----------------------------------------------------------------------
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index 22d17b7..da75a7b 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -165,6 +165,10 @@
<version>${slider.version}</version>
<exclusions>
<exclusion>
+ <groupId>asm</groupId>
+ <artifactId>asm</artifactId>
+ </exclusion>
+ <exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</exclusion>
@@ -209,8 +213,8 @@
<artifactId>jettison</artifactId>
</exclusion>
<exclusion>
- <groupId>asm</groupId>
- <artifactId>asm</artifactId>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/metastore/pom.xml
----------------------------------------------------------------------
diff --git a/metastore/pom.xml b/metastore/pom.xml
index eabcdb9..b6e3add 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -148,6 +148,22 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ <version>${hadoop.version}</version>
+ <optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commmons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<optional>true</optional>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index aed3373..6f4c153 100644
--- a/pom.xml
+++ b/pom.xml
@@ -137,7 +137,8 @@
<druid.version>0.9.2</druid.version>
<guava.version>14.0.1</guava.version>
<groovy.version>2.4.4</groovy.version>
- <hadoop.version>2.7.2</hadoop.version>
+ <h2database.version>1.3.166</h2database.version>
+ <hadoop.version>2.8.0</hadoop.version>
<hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
<hbase.version>1.1.1</hbase.version>
<!-- required for logging test to avoid including hbase which pulls disruptor transitively -->
@@ -651,13 +652,24 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-auth</artifactId>
+ <version>${hadoop.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>commmons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
- <exclusion>
+ <exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
@@ -697,7 +709,7 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
@@ -712,10 +724,55 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commmons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-registry</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-common</artifactId>
<version>${hbase.version}</version>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
index 55eb22e..5cea8f2 100644
--- a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
+++ b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
@@ -49,7 +49,7 @@ PREHOOK: query: ALTER TABLE default.encrypted_table RENAME TO encrypted_db.encry
PREHOOK: type: ALTERTABLE_RENAME
PREHOOK: Input: default@encrypted_table
PREHOOK: Output: default@encrypted_table
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. Alter Table operation for default.encrypted_table failed to move data due to: '/build/ql/test/data/warehouse/default/encrypted_table can't be moved from encryption zone /build/ql/test/data/warehouse/default/encrypted_table to encryption zone /build/ql/test/data/warehouse/encrypted_db.' See hive log file for details.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter table. Alter Table operation for default.encrypted_table failed to move data due to: '/build/ql/test/data/warehouse/encrypted_table can't be moved into an encryption zone.' See hive log file for details.
PREHOOK: query: SHOW TABLES
PREHOOK: type: SHOWTABLES
PREHOOK: Input: database:default
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/shims/0.23/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml
index d0d1d5f..3ff75ba 100644
--- a/shims/0.23/pom.xml
+++ b/shims/0.23/pom.xml
@@ -179,6 +179,10 @@
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/shims/scheduler/pom.xml
----------------------------------------------------------------------
diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml
index 9141c1e..6cb53bb 100644
--- a/shims/scheduler/pom.xml
+++ b/shims/scheduler/pom.xml
@@ -76,6 +76,12 @@
<artifactId>hadoop-yarn-server-resourcemanager</artifactId>
<version>${hadoop.version}</version>
<optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
http://git-wip-us.apache.org/repos/asf/hive/blob/4ca90e01/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 097efdb..b96714e 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -32,7 +32,7 @@
<properties>
<commons-lang.version>2.6</commons-lang.version>
<guava.version>14.0.1</guava.version>
- <hadoop.version>2.7.2</hadoop.version>
+ <hadoop.version>2.8.0</hadoop.version>
<junit.version>4.11</junit.version>
<slf4j.version>1.7.10</slf4j.version>
</properties>
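The slf4j-log4j12 exclusions added throughout this commit keep the new Hadoop 2.8.0 artifacts from dragging a second SLF4J binding onto the classpath; with duplicate bindings, SLF4J prints a "multiple SLF4J bindings" warning and binds to whichever it finds first. A quick way to check which binding actually wins at runtime, assuming slf4j-api is available (a sketch, not part of this commit):

  import org.slf4j.LoggerFactory;

  public class Slf4jBindingCheck {
    public static void main(String[] args) {
      // Prints the concrete ILoggerFactory implementation that got bound,
      // e.g. a Log4j or Logback factory class name.
      System.out.println(LoggerFactory.getILoggerFactory().getClass().getName());
    }
  }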
[7/8] hive git commit: HIVE-16549. Fix incompatible change in PredicateLeafImpl. A change in HIVE-15269 made the API incompatible and this fixes it.
Posted by om...@apache.org.
HIVE-16549. Fix incompatible change in PredicateLeafImpl.
A change in HIVE-15269 made the API incompatible and this fixes it.
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cf175410
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cf175410
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cf175410
Branch: refs/heads/branch-2.2
Commit: cf175410a5e2e00c7ff549a9b95adecbeadaa1a4
Parents: 3f82447
Author: Owen O'Malley <om...@apache.org>
Authored: Fri May 26 08:46:18 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
.../apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java | 8 ++++++++
1 file changed, 8 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/cf175410/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
index db0a582..6d8c83b 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
@@ -59,6 +59,14 @@ public final class SearchArgumentImpl implements SearchArgument {
Type type,
String columnName,
Object literal,
+ List<Object> literalList) {
+ this(operator, type, columnName, literal, literalList, null);
+ }
+
+ public PredicateLeafImpl(Operator operator,
+ Type type,
+ String columnName,
+ Object literal,
List<Object> literalList, Configuration conf) {
this.operator = operator;
this.type = type;
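To show the technique rather than Hive's exact types, here is a small self-contained sketch (class and field names are illustrative) of restoring compatibility by keeping the old constructor signature as an overload that delegates to the new one with a default value:

  public class OverloadCompatExample {
    private final String columnName;
    private final Object conf;   // stand-in for Hadoop's Configuration parameter

    // Original signature, preserved so callers built against the old API still work.
    public OverloadCompatExample(String columnName) {
      this(columnName, null);
    }

    // Newer signature that takes the extra argument.
    public OverloadCompatExample(String columnName, Object conf) {
      this.columnName = columnName;
      this.conf = conf;
    }

    public static void main(String[] args) {
      OverloadCompatExample oldStyle = new OverloadCompatExample("x");       // old-style call site
      OverloadCompatExample newStyle = new OverloadCompatExample("x", null); // new-style call site
      System.out.println(oldStyle.columnName + ", " + newStyle.conf);
    }
  }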
[5/8] hive git commit: HIVE-16683. Backport of ORC-125 to fix incorrect handling of future WriterVersions in ORC.
Posted by om...@apache.org.
HIVE-16683. Backport of ORC-125 to fix incorrect handling of future WriterVersions in ORC.
Signed-off-by: Owen O'Malley <om...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b7756980
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b7756980
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b7756980
Branch: refs/heads/branch-2.2
Commit: b775698016a1246814b24a270dc5fa10868bbb43
Parents: fd1188a
Author: Owen O'Malley <om...@apache.org>
Authored: Tue May 16 11:04:44 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
orc/src/java/org/apache/orc/OrcFile.java | 10 +++++++++-
orc/src/test/org/apache/orc/TestVectorOrcFile.java | 7 +++++++
2 files changed, 16 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/b7756980/orc/src/java/org/apache/orc/OrcFile.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/OrcFile.java b/orc/src/java/org/apache/orc/OrcFile.java
index ddfa9f7..06fb666 100644
--- a/orc/src/java/org/apache/orc/OrcFile.java
+++ b/orc/src/java/org/apache/orc/OrcFile.java
@@ -140,8 +140,16 @@ public class OrcFile {
}
}
+ /**
+ * Convert the integer from OrcProto.PostScript.writerVersion
+ * to the enumeration with unknown versions being mapped to FUTURE.
+ * @param val the serialized writer version
+ * @return the corresponding enumeration value
+ */
public static WriterVersion from(int val) {
- if (val == FUTURE.id) return FUTURE; // Special handling for the magic value.
+ if (val >= values.length) {
+ return FUTURE;
+ }
return values[val];
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b7756980/orc/src/test/org/apache/orc/TestVectorOrcFile.java
----------------------------------------------------------------------
diff --git a/orc/src/test/org/apache/orc/TestVectorOrcFile.java b/orc/src/test/org/apache/orc/TestVectorOrcFile.java
index 112edb9..73abf9e 100644
--- a/orc/src/test/org/apache/orc/TestVectorOrcFile.java
+++ b/orc/src/test/org/apache/orc/TestVectorOrcFile.java
@@ -2779,4 +2779,11 @@ public class TestVectorOrcFile {
rows.nextBatch(batch);
assertEquals(0, batch.size);
}
+
+ @Test
+ public void testWriterVersion() throws Exception {
+ assertEquals(OrcFile.WriterVersion.FUTURE, OrcFile.WriterVersion.from(99));
+ assertEquals(OrcFile.WriterVersion.ORIGINAL, OrcFile.WriterVersion.from(0));
+ assertEquals(OrcFile.WriterVersion.HIVE_4243, OrcFile.WriterVersion.from(2));
+ }
}
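The fix replaces the exact-match check on the magic FUTURE id with a bounds check, so a file written by a newer ORC version maps to FUTURE instead of throwing ArrayIndexOutOfBoundsException on lookup. A self-contained sketch of that lookup pattern (the enum constants here are illustrative, not ORC's full list):

  public class VersionLookupSketch {
    enum WriterVersion { ORIGINAL, HIVE_8732, HIVE_4243, FUTURE }

    private static final WriterVersion[] VALUES = WriterVersion.values();

    static WriterVersion from(int val) {
      if (val < 0 || val >= VALUES.length) {
        return WriterVersion.FUTURE;   // any unknown (newer) writer degrades to FUTURE
      }
      return VALUES[val];
    }

    public static void main(String[] args) {
      System.out.println(from(0));    // ORIGINAL
      System.out.println(from(2));    // HIVE_4243
      System.out.println(from(99));   // FUTURE instead of an exception
    }
  }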
[8/8] hive git commit: HIVE-15335: Fast Decimal (addendum)
Posted by om...@apache.org.
HIVE-15335: Fast Decimal (addendum)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a046198c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a046198c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a046198c
Branch: refs/heads/branch-2.2
Commit: a046198cd41bbec7236ab2a19382fda62e41d4e0
Parents: 93c33ff
Author: Owen O'Malley <om...@apache.org>
Authored: Thu May 25 09:29:03 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
.../orc/impl/ConvertTreeReaderFactory.java | 50 ++------------------
.../hadoop/hive/ql/util/TimestampUtils.java | 36 ++++++--------
2 files changed, 19 insertions(+), 67 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/a046198c/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
index a7c3380..2d293b5 100644
--- a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
+++ b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
@@ -613,53 +613,13 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
@Override
public void setConvertVectorElement(int elementNum) throws IOException {
- HiveDecimalWritable decWritable = decimalColVector.vector[elementNum];
- long[] vector = longColVector.vector;
- Category readerCategory = readerType.getCategory();
-
- // Check to see if the decimal will fit in the Hive integer data type.
- // If not, set the element to null.
- boolean isInRange;
- switch (readerCategory) {
- case BOOLEAN:
- // No data loss for boolean.
- vector[elementNum] = decWritable.signum() == 0 ? 0 : 1;
- return;
- case BYTE:
- isInRange = decWritable.isByte();
- break;
- case SHORT:
- isInRange = decWritable.isShort();
- break;
- case INT:
- isInRange = decWritable.isInt();
- break;
- case LONG:
- isInRange = decWritable.isLong();
- break;
- default:
- throw new RuntimeException("Unexpected type kind " + readerCategory.name());
- }
- if (!isInRange) {
+ HiveDecimal decimalValue = decimalColVector.vector[elementNum].getHiveDecimal();
+ if (decimalValue.compareTo(DECIMAL_MAX_LONG) > 0 ||
+ decimalValue.compareTo(DECIMAL_MIN_LONG) < 0) {
longColVector.isNull[elementNum] = true;
longColVector.noNulls = false;
- return;
- }
- switch (readerCategory) {
- case BYTE:
- vector[elementNum] = decWritable.byteValue();
- break;
- case SHORT:
- vector[elementNum] = decWritable.shortValue();
- break;
- case INT:
- vector[elementNum] = decWritable.intValue();
- break;
- case LONG:
- vector[elementNum] = decWritable.longValue();
- break;
- default:
- throw new RuntimeException("Unexpected type kind " + readerCategory.name());
+ } else {
+ downCastAnyInteger(longColVector, elementNum, decimalValue.longValue(), readerType);
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a046198c/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
index dfc7272..c0e8a2e 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
@@ -76,32 +76,24 @@ public class TimestampUtils {
* @param dec
* @return
*/
- public static Timestamp decimalToTimestamp(HiveDecimal dec) {
+ public static Timestamp decimalToTimestamp(HiveDecimal d) {
+ try {
+ BigDecimal nanoInstant = d.bigDecimalValue().multiply(BILLION_BIG_DECIMAL);
+ int nanos = nanoInstant.remainder(BILLION_BIG_DECIMAL).intValue();
+ if (nanos < 0) {
+ nanos += 1000000000;
+ }
+ long seconds =
+ nanoInstant.subtract(new BigDecimal(nanos)).divide(BILLION_BIG_DECIMAL).longValue();
+ Timestamp t = new Timestamp(seconds * 1000);
+ t.setNanos(nanos);
- HiveDecimalWritable nanosWritable = new HiveDecimalWritable(dec);
- nanosWritable.mutateFractionPortion(); // Clip off seconds portion.
- nanosWritable.mutateScaleByPowerOfTen(9); // Bring nanoseconds into integer portion.
- if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
+ return t;
+ } catch (NumberFormatException nfe) {
return null;
- }
- int nanos = nanosWritable.intValue();
- if (nanos < 0) {
- nanos += 1000000000;
- }
- nanosWritable.setFromLong(nanos);
-
- HiveDecimalWritable nanoInstant = new HiveDecimalWritable(dec);
- nanoInstant.mutateScaleByPowerOfTen(9);
-
- nanoInstant.mutateSubtract(nanosWritable);
- nanoInstant.mutateScaleByPowerOfTen(-9); // Back to seconds.
- if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
+ } catch (IllegalArgumentException iae) {
return null;
}
- long seconds = nanoInstant.longValue();
- Timestamp t = new Timestamp(seconds * 1000);
- t.setNanos(nanos);
- return t;
}
/**
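The rewritten decimalToTimestamp above splits a decimal number of seconds into whole seconds plus a non-negative nanosecond remainder before building the Timestamp. A self-contained sketch of that arithmetic using plain BigDecimal as a stand-in for HiveDecimal (illustrative only):

  import java.math.BigDecimal;
  import java.sql.Timestamp;

  public class DecimalToTimestampSketch {
    private static final BigDecimal BILLION = BigDecimal.valueOf(1_000_000_000L);

    static Timestamp decimalToTimestamp(BigDecimal seconds) {
      BigDecimal nanoInstant = seconds.multiply(BILLION);        // scale to nanoseconds
      int nanos = nanoInstant.remainder(BILLION).intValue();     // fractional-second part
      if (nanos < 0) {
        nanos += 1_000_000_000;          // keep nanos in [0, 1e9) for negative instants
      }
      long secs = nanoInstant.subtract(BigDecimal.valueOf(nanos))
          .divide(BILLION).longValue();  // whole seconds
      Timestamp t = new Timestamp(secs * 1000);
      t.setNanos(nanos);
      return t;
    }

    public static void main(String[] args) {
      // -0.5 seconds becomes -1 whole second plus 500,000,000 nanoseconds.
      System.out.println(decimalToTimestamp(new BigDecimal("-0.5")));
    }
  }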
[3/8] hive git commit: HIVE-14595. Addendum fixing up backport.
Posted by om...@apache.org.
HIVE-14595. Addendum fixing up backport.
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8e09edc4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8e09edc4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8e09edc4
Branch: refs/heads/branch-2.2
Commit: 8e09edc442b3a831815f78684934b51202b26ca4
Parents: b775698
Author: Owen O'Malley <om...@apache.org>
Authored: Fri May 19 13:11:49 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
.../hadoop/hive/serde2/io/TimestampWritable.java | 17 ++---------------
.../hive/serde2/io/TestTimestampWritable.java | 17 ++---------------
2 files changed, 4 insertions(+), 30 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/8e09edc4/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index bda8a7f..463d32c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -556,22 +556,9 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
}
public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
- boolean hasDecimalOrSecondVInt = hasDecimalOrSecondVInt(bytes[offset]);
- long seconds = (long) TimestampWritable.getSeconds(bytes, offset);
- int nanos = 0;
- if (hasDecimalOrSecondVInt) {
- nanos = TimestampWritable.getNanos(bytes, offset + 4);
- if (hasSecondVInt(bytes[offset + 4])) {
- seconds += LazyBinaryUtils.readVLongFromByteArray(bytes,
- offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4]));
- }
- }
+ long seconds = getSeconds(bytes, offset);
t.setTime(seconds * 1000);
- if (hasDecimalOrSecondVInt(bytes[offset])) {
- t.setNanos(getNanos(bytes, offset + 4));
- } else {
- t.setNanos(0);
- }
+ t.setNanos(getNanos(bytes, offset + 4));
}
public static Timestamp createTimestamp(byte[] bytes, int offset) {
http://git-wip-us.apache.org/repos/asf/hive/blob/8e09edc4/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
index 3c483cc..cd7a1b9 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
@@ -495,21 +495,8 @@ public class TestTimestampWritable {
@Test
public void testSetTimestamp() {
- // one VInt without nanos
- verifySetTimestamp(1000);
-
- // one VInt with nanos
- verifySetTimestamp(1001);
-
- // two VInt without nanos
- verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1000);
-
- // two VInt with nanos
- verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1234);
- }
-
- private static void verifySetTimestamp(long time) {
- Timestamp t1 = new Timestamp(time);
+ // make sure we need a 2nd VInt
+ Timestamp t1 = new Timestamp((long) Integer.MAX_VALUE * 1000 + 1234);
TimestampWritable writable = new TimestampWritable(t1);
byte[] bytes = writable.getBytes();
Timestamp t2 = new Timestamp(0);
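With the simplification, setTimestamp decodes whole seconds and nanoseconds and reassembles them directly; the reassembly itself is the standard java.sql.Timestamp idiom shown in this small self-contained example (the decoded values below are made up):

  import java.sql.Timestamp;

  public class TimestampAssemblyExample {
    public static void main(String[] args) {
      long seconds = 1496222492L;   // would come from getSeconds(bytes, offset)
      int nanos = 123456789;        // would come from getNanos(bytes, offset + 4)
      Timestamp t = new Timestamp(0);
      t.setTime(seconds * 1000);    // whole seconds, expressed as milliseconds
      t.setNanos(nanos);            // fractional second, replaces any millisecond fraction
      System.out.println(t);
    }
  }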
[6/8] hive git commit: HIVE-14251: Union All of different types resolves to incorrect data (Addendum)
Posted by om...@apache.org.
HIVE-14251: Union All of different types resolves to incorrect data (Addendum)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/93c33ff8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/93c33ff8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/93c33ff8
Branch: refs/heads/branch-2.2
Commit: 93c33ff8d492c62b0403fcfce65adbf25d412c64
Parents: 4ca90e0
Author: Owen O'Malley <om...@apache.org>
Authored: Mon May 22 15:27:51 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java | 8 +-------
1 file changed, 1 insertion(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/93c33ff8/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
index 2984090..aa2f6f8 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
@@ -30,9 +30,9 @@ import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.UDFLn;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
@@ -343,14 +343,8 @@ public class TestFunctionRegistry extends TestCase {
TypeInfoFactory.doubleTypeInfo);
unionAll(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo,
TypeInfoFactory.decimalTypeInfo);
- unionAll(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo,
- TypeInfoFactory.decimalTypeInfo);
unionAll(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo,
TypeInfoFactory.decimalTypeInfo);
- unionAll(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.stringTypeInfo,
- TypeInfoFactory.stringTypeInfo);
- unionAll(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo,
- TypeInfoFactory.doubleTypeInfo);
unionAll(varchar5, varchar10, varchar10);
unionAll(varchar10, varchar5, varchar10);