You are viewing a plain text version of this content; the canonical hyperlink was lost in the plain-text conversion (see the repository/commit URLs below for the authoritative source).
Posted to commits@hive.apache.org by om...@apache.org on 2017/05/31 16:37:07 UTC
hive git commit: HIVE-14309. Shade the contents of the hive-orc jar
to be in org.apache.hive.orc.
Repository: hive
Updated Branches:
refs/heads/branch-2.2 e4ac41d86 -> fd1188a6a
HIVE-14309. Shade the contents of the hive-orc jar to be in org.apache.hive.orc.
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fd1188a6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fd1188a6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fd1188a6
Branch: refs/heads/branch-2.2
Commit: fd1188a6a79c6da868d463c1e5db50c017c3b1a2
Parents: e4ac41d
Author: Owen O'Malley <om...@apache.org>
Authored: Tue May 30 16:29:19 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:27:38 2017 -0700
----------------------------------------------------------------------
.../TestHiveDecimalOrcSerializationUtils.java | 6 ++--
.../hive/hcatalog/mapreduce/SpecialCases.java | 4 +--
.../hive/hcatalog/streaming/TestStreaming.java | 8 ++---
.../hive/llap/cache/LowLevelCacheImpl.java | 2 +-
.../hive/llap/cache/SerDeLowLevelCacheImpl.java | 10 +++---
.../hive/llap/io/api/impl/LlapInputFormat.java | 4 +--
.../hive/llap/io/api/impl/LlapRecordReader.java | 4 +--
.../llap/io/decode/ColumnVectorProducer.java | 2 +-
.../llap/io/decode/EncodedDataConsumer.java | 2 +-
.../io/decode/GenericColumnVectorProducer.java | 16 +++++-----
.../llap/io/decode/OrcColumnVectorProducer.java | 2 +-
.../llap/io/decode/OrcEncodedDataConsumer.java | 18 +++++------
.../hive/llap/io/decode/ReadPipeline.java | 2 +-
.../llap/io/encoded/OrcEncodedDataReader.java | 22 +++++++-------
.../llap/io/encoded/SerDeEncodedDataReader.java | 26 ++++++++--------
.../llap/io/metadata/ConsumerFileMetadata.java | 6 ++--
.../io/metadata/ConsumerStripeMetadata.java | 8 ++---
.../hive/llap/io/metadata/OrcFileMetadata.java | 14 ++++-----
.../llap/io/metadata/OrcStripeMetadata.java | 10 +++---
.../TestIncrementalObjectSizeEstimator.java | 8 ++---
orc/pom.xml | 32 ++++++++++++++++++++
.../hive/ql/exec/OrcFileMergeOperator.java | 2 +-
.../hive/ql/hooks/PostExecOrcFileDump.java | 4 +--
.../hadoop/hive/ql/io/orc/CompressionKind.java | 14 ++++-----
.../hadoop/hive/ql/io/orc/ExternalCache.java | 2 +-
.../hadoop/hive/ql/io/orc/LocalCache.java | 2 +-
.../apache/hadoop/hive/ql/io/orc/OrcFile.java | 16 +++++-----
.../hive/ql/io/orc/OrcFileFormatProxy.java | 8 ++---
.../hive/ql/io/orc/OrcFileKeyWrapper.java | 4 +--
.../io/orc/OrcFileStripeMergeRecordReader.java | 4 +--
.../hive/ql/io/orc/OrcFileValueWrapper.java | 4 +--
.../hadoop/hive/ql/io/orc/OrcInputFormat.java | 22 +++++++-------
.../hive/ql/io/orc/OrcNewInputFormat.java | 2 +-
.../hadoop/hive/ql/io/orc/OrcNewSplit.java | 6 ++--
.../hadoop/hive/ql/io/orc/OrcOutputFormat.java | 4 +--
.../hive/ql/io/orc/OrcRawRecordMerger.java | 10 +++---
.../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 8 ++---
.../apache/hadoop/hive/ql/io/orc/OrcSerde.java | 2 +-
.../apache/hadoop/hive/ql/io/orc/OrcSplit.java | 4 +--
.../apache/hadoop/hive/ql/io/orc/OrcStruct.java | 2 +-
.../apache/hadoop/hive/ql/io/orc/OrcUnion.java | 2 +-
.../apache/hadoop/hive/ql/io/orc/Reader.java | 4 +--
.../hadoop/hive/ql/io/orc/ReaderImpl.java | 4 +--
.../hadoop/hive/ql/io/orc/RecordReader.java | 2 +-
.../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 4 +--
.../ql/io/orc/VectorizedOrcInputFormat.java | 6 ++--
.../apache/hadoop/hive/ql/io/orc/Writer.java | 2 +-
.../hadoop/hive/ql/io/orc/WriterImpl.java | 10 +++---
.../hive/ql/io/orc/encoded/EncodedReader.java | 4 +--
.../ql/io/orc/encoded/EncodedReaderImpl.java | 20 ++++++------
.../orc/encoded/EncodedTreeReaderFactory.java | 16 +++++-----
.../hadoop/hive/ql/io/orc/encoded/Reader.java | 4 +--
.../hive/ql/io/orc/encoded/ReaderImpl.java | 2 +-
.../hive/ql/io/orc/encoded/StreamUtils.java | 4 +--
.../hive/ql/io/orc/TestInputOutputFormat.java | 10 +++---
.../hadoop/hive/ql/io/orc/TestOrcFile.java | 28 ++++++++---------
.../hive/ql/io/orc/TestOrcRawRecordMerger.java | 10 +++---
.../hive/ql/io/orc/TestOrcRecordUpdater.java | 4 +--
.../hive/ql/io/orc/TestOrcSerDeStats.java | 14 ++++-----
59 files changed, 254 insertions(+), 222 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
index 882ff86..eb6e4e1 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
@@ -30,7 +30,7 @@ import java.math.BigInteger;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.orc.impl.SerializationUtils;
+import org.apache.hive.orc.impl.SerializationUtils;
import org.apache.hadoop.hive.common.type.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
@@ -111,11 +111,11 @@ public class TestHiveDecimalOrcSerializationUtils extends HiveDecimalTestBase {
fail();
}
byte[] bytes = outputStream.toByteArray();
-
+
ByteArrayOutputStream outputStreamExpected = new ByteArrayOutputStream();
SerializationUtils.writeBigInteger(outputStreamExpected, bigInteger);
byte[] bytesExpected = outputStreamExpected.toByteArray();
-
+
// System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER check streams");
// System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER bytes1 " + displayBytes(bytes, 0, bytes.length));
if (!StringExpr.equal(bytes, 0, bytes.length, bytesExpected, 0, bytesExpected.length)) {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
index 60af5c0..b8dfb6d 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
-import org.apache.orc.OrcConf;
+import org.apache.hive.orc.OrcConf;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
@@ -114,7 +114,7 @@ public class SpecialCases {
if (jobProperties.get(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName())==null
|| jobProperties.get(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName()).isEmpty()) {
-
+
jobProperties.put(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(),
AvroSerDe.getSchemaFromCols(properties, colNames, colTypes, null).toString());
}
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 40cf2b5..4ca9144 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -66,8 +66,8 @@ import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.orc.impl.OrcAcidUtils;
-import org.apache.orc.tools.FileDump;
+import org.apache.hive.orc.impl.OrcAcidUtils;
+import org.apache.hive.orc.tools.FileDump;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcStruct;
@@ -1732,7 +1732,7 @@ public class TestStreaming {
txnBatch.write("name4,2,more Streaming unlimited".getBytes());
txnBatch.write("name5,2,even more Streaming unlimited".getBytes());
txnBatch.commit();
-
+
expectedEx = null;
txnBatch.beginNextTransaction();
writer.enableErrors();
@@ -1778,7 +1778,7 @@ public class TestStreaming {
}
Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"),
expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
-
+
r = msClient.showTxns();
Assert.assertEquals("HWM didn't match", 6, r.getTxn_high_water_mark());
ti = r.getOpen_txns();
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java
index 4dc1c23..c16eaef 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.llap.cache;
-import org.apache.orc.impl.RecordReaderUtils;
+import org.apache.hive.orc.impl.RecordReaderUtils;
import java.nio.ByteBuffer;
import java.util.Iterator;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SerDeLowLevelCacheImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SerDeLowLevelCacheImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SerDeLowLevelCacheImpl.java
index 4809398..dbe8ac9 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SerDeLowLevelCacheImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/SerDeLowLevelCacheImpl.java
@@ -39,8 +39,8 @@ import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority;
import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl;
import org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics;
import org.apache.hive.common.util.Ref;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcProto.ColumnEncoding;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcProto.ColumnEncoding;
import com.google.common.base.Function;
@@ -142,7 +142,7 @@ public class SerDeLowLevelCacheImpl implements BufferUsageManager, LlapOomDebugD
this.rowCount = rowCount;
this.data = encodings == null ? null : new LlapDataBuffer[encodings.length][][];
}
-
+
@Override
public String toString() {
return toCoordinateString() + " with encodings [" + Arrays.toString(encodings)
@@ -219,7 +219,7 @@ public class SerDeLowLevelCacheImpl implements BufferUsageManager, LlapOomDebugD
sb.append("]");
return sb.toString();
}
-
+
public static String toString(LlapDataBuffer[][] data) {
if (data == null) return "null";
@@ -614,7 +614,7 @@ public class SerDeLowLevelCacheImpl implements BufferUsageManager, LlapOomDebugD
}
to.data[colIx] = fromColData;
}
- }
+ }
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
index d00b985..3981300 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
@@ -67,8 +67,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hive.common.util.HiveStringUtils;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.SchemaEvolution;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.SchemaEvolution;
import org.apache.tez.common.counters.TezCounters;
import org.apache.tez.runtime.api.impl.TaskSpec;
import org.slf4j.Logger;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
index 58eb435..b801f1b 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
@@ -55,8 +55,8 @@ import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.SchemaEvolution;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.SchemaEvolution;
import org.apache.tez.common.counters.TezCounters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
index 1a8ed7b..cf81283 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
/**
* Entry point used by LlapInputFormat to create read pipeline to get data.
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
index 312f008..b1977df 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/EncodedDataConsumer.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl;
import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics;
import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer;
import org.apache.hive.common.util.FixedSizedObjectPool;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
public abstract class EncodedDataConsumer<BatchKey, BatchType extends EncodedColumnBatch<BatchKey>>
implements Consumer<BatchType>, ReadPipeline {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
index 15e9b6a..33df03c 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
@@ -48,14 +48,14 @@ import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.OrcProto.ColumnEncoding;
-import org.apache.orc.OrcProto.RowIndex;
-import org.apache.orc.OrcProto.RowIndexEntry;
-import org.apache.orc.OrcProto.Type;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.OrcProto.ColumnEncoding;
+import org.apache.hive.orc.OrcProto.RowIndex;
+import org.apache.hive.orc.OrcProto.RowIndexEntry;
+import org.apache.hive.orc.OrcProto.Type;
+import org.apache.hive.orc.TypeDescription;
public class GenericColumnVectorProducer implements ColumnVectorProducer {
private final SerDeLowLevelCacheImpl cache;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
index f82ede5..0a3d317 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
public class OrcColumnVectorProducer implements ColumnVectorProducer {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index 9414202..169e6be 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -39,21 +39,21 @@ import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.orc.CompressionCodec;
-import org.apache.orc.impl.PositionProvider;
+import org.apache.hive.orc.CompressionCodec;
+import org.apache.hive.orc.impl.PositionProvider;
import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer;
import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedTreeReaderFactory;
import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedTreeReaderFactory.SettableTreeReader;
import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.PhysicalFsWriter;
-import org.apache.orc.impl.TreeReaderFactory;
-import org.apache.orc.impl.TreeReaderFactory.StructTreeReader;
-import org.apache.orc.impl.TreeReaderFactory.TreeReader;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.PhysicalFsWriter;
+import org.apache.hive.orc.impl.TreeReaderFactory;
+import org.apache.hive.orc.impl.TreeReaderFactory.StructTreeReader;
+import org.apache.hive.orc.impl.TreeReaderFactory.TreeReader;
+import org.apache.hive.orc.OrcProto;
public class OrcEncodedDataConsumer
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ReadPipeline.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ReadPipeline.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ReadPipeline.java
index 5489326..f3ff56d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ReadPipeline.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ReadPipeline.java
@@ -21,7 +21,7 @@ import java.util.concurrent.Callable;
import org.apache.hadoop.hive.llap.ConsumerFeedback;
import org.apache.hadoop.hive.llap.io.api.impl.ColumnVectorBatch;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
public interface ReadPipeline extends ConsumerFeedback<ColumnVectorBatch> {
public Callable<Void> getReadCallable();
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index 1c295a3..4b05b6f 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -26,11 +26,11 @@ import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.llap.counters.LlapIOCounters;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.DataReaderProperties;
-import org.apache.orc.impl.OrcIndex;
-import org.apache.orc.impl.SchemaEvolution;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.DataReaderProperties;
+import org.apache.hive.orc.impl.OrcIndex;
+import org.apache.hive.orc.impl.SchemaEvolution;
import org.apache.tez.common.counters.TezCounters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -61,11 +61,11 @@ import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
import org.apache.hadoop.hive.ql.exec.DDLTask;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.HdfsUtils;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.DataReader;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.DataReader;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions;
-import org.apache.orc.OrcConf;
+import org.apache.hive.orc.OrcConf;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcSplit;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader;
@@ -75,13 +75,13 @@ import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader;
import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory;
-import org.apache.orc.impl.RecordReaderUtils;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.impl.RecordReaderUtils;
+import org.apache.hive.orc.StripeInformation;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.common.util.FixedSizedObjectPool;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
import org.apache.tez.common.CallableWithNdc;
/**
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
index a9d91b3..cd578f6 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
@@ -78,19 +78,19 @@ import org.apache.hadoop.mapred.SplitLocationInfo;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.common.util.FixedSizedObjectPool;
import org.apache.hive.common.util.Ref;
-import org.apache.orc.CompressionCodec;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.OrcFile.EncodingStrategy;
-import org.apache.orc.OrcFile.Version;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcProto.ColumnEncoding;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.MemoryManager;
-import org.apache.orc.impl.OutStream;
-import org.apache.orc.impl.OutStream.OutputReceiver;
-import org.apache.orc.impl.PhysicalWriter;
-import org.apache.orc.impl.StreamName;
+import org.apache.hive.orc.CompressionCodec;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.OrcFile.EncodingStrategy;
+import org.apache.hive.orc.OrcFile.Version;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcProto.ColumnEncoding;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.MemoryManager;
+import org.apache.hive.orc.impl.OutStream;
+import org.apache.hive.orc.impl.OutStream.OutputReceiver;
+import org.apache.hive.orc.impl.PhysicalWriter;
+import org.apache.hive.orc.impl.StreamName;
import org.apache.tez.common.CallableWithNdc;
import org.apache.tez.common.counters.TezCounters;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
index 040f1a7..f000678 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hive.llap.io.metadata;
import java.util.List;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.OrcProto.Type;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.OrcProto.Type;
+import org.apache.hive.orc.TypeDescription;
public interface ConsumerFileMetadata {
int getStripeCount();
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerStripeMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerStripeMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerStripeMetadata.java
index 1e28f5f..87be05b 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerStripeMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerStripeMetadata.java
@@ -19,10 +19,10 @@ package org.apache.hadoop.hive.llap.io.metadata;
import java.util.List;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcProto.ColumnEncoding;
-import org.apache.orc.OrcProto.RowIndex;
-import org.apache.orc.OrcProto.RowIndexEntry;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcProto.ColumnEncoding;
+import org.apache.hive.orc.OrcProto.RowIndex;
+import org.apache.hive.orc.OrcProto.RowIndexEntry;
public interface ConsumerStripeMetadata {
int getStripeIx();
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
index 2c7a234..b4cf6a6 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
@@ -29,13 +29,13 @@ import org.apache.hadoop.hive.llap.cache.LlapCacheableBuffer;
import org.apache.hadoop.hive.ql.io.SyntheticFileId;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.orc.Reader;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.FileMetadata;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.ReaderImpl;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.FileMetadata;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.ReaderImpl;
/** ORC file metadata. Currently contains some duplicate info due to how different parts
* of ORC use different info. Ideally we would get rid of protobuf structs in code beyond reading,
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
index 5ef1678..3c7268b 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
@@ -30,11 +30,11 @@ import org.apache.hadoop.hive.llap.cache.EvictionDispatcher;
import org.apache.hadoop.hive.llap.cache.LlapCacheableBuffer;
import org.apache.hadoop.hive.ql.io.SyntheticFileId;
import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
-import org.apache.orc.DataReader;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcProto.RowIndexEntry;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.impl.OrcIndex;
+import org.apache.hive.orc.DataReader;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcProto.RowIndexEntry;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.impl.OrcIndex;
public class OrcStripeMetadata extends LlapCacheableBuffer implements ConsumerStripeMetadata {
private final OrcBatchKey stripeKey;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
index 183fb1b..3c9d1e6 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
@@ -28,18 +28,18 @@ import java.util.ArrayList;
import java.util.LinkedHashSet;
import org.apache.hadoop.hive.common.io.DiskRangeList;
-import org.apache.orc.DataReader;
+import org.apache.hive.orc.DataReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator;
import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator;
import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata;
import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
-import org.apache.orc.impl.OrcIndex;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.impl.OrcIndex;
+import org.apache.hive.orc.StripeInformation;
import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
import org.junit.Test;
import org.mockito.Mockito;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/orc/pom.xml
----------------------------------------------------------------------
diff --git a/orc/pom.xml b/orc/pom.xml
index f75b91c..de9c417 100644
--- a/orc/pom.xml
+++ b/orc/pom.xml
@@ -179,6 +179,38 @@
</execution>
</executions>
</plugin>
+
+ <!-- Below we shade all of the org.apache.orc class names in hive-orc
+ to a unique org.apache.hive.orc prefix. This allows clients to
+ use both Hive 2.2 and the standalone orc project. The uses in
+ Hive 2.2 have been changed to use the org.apache.hive.orc prefix.
+ In Hive 2.3 and beyond, hive uses the standalone ORC project. -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <artifactSet>
+ <includes>
+ <include>org.apache.hive:hive-orc</include>
+ </includes>
+ </artifactSet>
+ <relocations>
+ <relocation>
+ <pattern>org.apache.orc</pattern>
+ <shadedPattern>org.apache.hive.orc</shadedPattern>
+ </relocation>
+ </relocations>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
</project>
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
index e3cb765..d0455c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java
@@ -26,7 +26,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
-import org.apache.orc.CompressionKind;
+import org.apache.hive.orc.CompressionKind;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFileKeyWrapper;
import org.apache.hadoop.hive.ql.io.orc.OrcFileValueWrapper;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
index b1595ce..4311339 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.FetchTask;
-import org.apache.orc.FileFormatException;
-import org.apache.orc.tools.FileDump;
+import org.apache.hive.orc.FileFormatException;
+import org.apache.hive.orc.tools.FileDump;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.session.SessionState;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java
index 22627df..26b4c8e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java
@@ -24,18 +24,18 @@ package org.apache.hadoop.hive.ql.io.orc;
* migrate to the org.apache.orc package.
*/
public enum CompressionKind {
- NONE(org.apache.orc.CompressionKind.NONE),
- ZLIB(org.apache.orc.CompressionKind.ZLIB),
- SNAPPY(org.apache.orc.CompressionKind.SNAPPY),
- LZO(org.apache.orc.CompressionKind.LZO);
+ NONE(org.apache.hive.orc.CompressionKind.NONE),
+ ZLIB(org.apache.hive.orc.CompressionKind.ZLIB),
+ SNAPPY(org.apache.hive.orc.CompressionKind.SNAPPY),
+ LZO(org.apache.hive.orc.CompressionKind.LZO);
- CompressionKind(org.apache.orc.CompressionKind underlying) {
+ CompressionKind(org.apache.hive.orc.CompressionKind underlying) {
this.underlying = underlying;
}
- public org.apache.orc.CompressionKind getUnderlying() {
+ public org.apache.hive.orc.CompressionKind getUnderlying() {
return underlying;
}
- private final org.apache.orc.CompressionKind underlying;
+ private final org.apache.hive.orc.CompressionKind underlying;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
index 9299306..86769a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/LocalCache.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/LocalCache.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/LocalCache.java
index 88b65dc..9a9eff1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/LocalCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/LocalCache.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.HadoopShims;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index 5366020..521c575 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -27,15 +27,15 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.orc.FileMetadata;
-import org.apache.orc.impl.MemoryManager;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.FileMetadata;
+import org.apache.hive.orc.impl.MemoryManager;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.OrcTail;
/**
* Contains factory methods to read or write ORC files.
*/
-public final class OrcFile extends org.apache.orc.OrcFile {
+public final class OrcFile extends org.apache.hive.orc.OrcFile {
// unused
protected OrcFile() {}
@@ -54,7 +54,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
return new ReaderImpl(path, opts);
}
- public static class ReaderOptions extends org.apache.orc.OrcFile.ReaderOptions {
+ public static class ReaderOptions extends org.apache.hive.orc.OrcFile.ReaderOptions {
public ReaderOptions(Configuration conf) {
super(conf);
}
@@ -92,7 +92,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
/**
* Options for creating ORC file writers.
*/
- public static class WriterOptions extends org.apache.orc.OrcFile.WriterOptions {
+ public static class WriterOptions extends org.apache.hive.orc.OrcFile.WriterOptions {
private boolean explicitSchema = false;
private ObjectInspector inspector = null;
// Setting the default batch size to 1000 makes the memory check at 5000
@@ -231,7 +231,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
/**
* Sets the generic compression that is used to compress the data.
*/
- public WriterOptions compress(org.apache.orc.CompressionKind value) {
+ public WriterOptions compress(org.apache.hive.orc.CompressionKind value) {
super.compress(value);
return this;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
index 6d9f653..7b1c41f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
@@ -27,10 +27,10 @@ import org.apache.hadoop.hive.metastore.FileFormatProxy;
import org.apache.hadoop.hive.metastore.Metastore.SplitInfo;
import org.apache.hadoop.hive.metastore.Metastore.SplitInfos;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
-import org.apache.orc.OrcProto;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.StripeStatistics;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.StripeStatistics;
+import org.apache.hive.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileKeyWrapper.java
index 40f1da0..d6ff52c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileKeyWrapper.java
@@ -25,8 +25,8 @@ import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.OrcProto;
/**
* Key for OrcFileMergeMapper task. Contains orc file related information that
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileStripeMergeRecordReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileStripeMergeRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileStripeMergeRecordReader.java
index f06195f..334f311 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileStripeMergeRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileStripeMergeRecordReader.java
@@ -27,8 +27,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;
-import org.apache.orc.OrcProto;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.StripeInformation;
public class OrcFileStripeMergeRecordReader implements
RecordReader<OrcFileKeyWrapper, OrcFileValueWrapper> {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileValueWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileValueWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileValueWrapper.java
index 846c874..6a61729 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileValueWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileValueWrapper.java
@@ -24,8 +24,8 @@ import java.io.IOException;
import java.util.List;
import org.apache.hadoop.io.WritableComparable;
-import org.apache.orc.OrcProto;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.StripeInformation;
/**
* Value for OrcFileMergeMapper. Contains stripe related information for the
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 56eeb58..fc7bbe4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.hive.ql.io.orc;
-import org.apache.orc.impl.InStream;
-import org.apache.orc.impl.SchemaEvolution;
+import org.apache.hive.orc.impl.InStream;
+import org.apache.hive.orc.impl.SchemaEvolution;
+
-
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
@@ -54,12 +54,12 @@ import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.orc.ColumnStatistics;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.StripeStatistics;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.ColumnStatistics;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.StripeStatistics;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -115,8 +115,8 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.Ref;
-import org.apache.orc.ColumnStatistics;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.ColumnStatistics;
+import org.apache.hive.orc.OrcProto;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
index 3b05fb2..f2cf0b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
/** An InputFormat for ORC files. Keys are meaningless,
* value is the OrcStruct object */
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewSplit.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewSplit.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewSplit.java
index 0c85827..a317262 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewSplit.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewSplit.java
@@ -26,8 +26,8 @@ import java.util.List;
import org.apache.hadoop.hive.ql.io.AcidInputFormat;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.orc.OrcProto;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.impl.OrcTail;
/**
* OrcFileSplit. Holds file meta info
@@ -46,7 +46,7 @@ public class OrcNewSplit extends FileSplit {
// so just pass nulls to this super constructor.
super(null, 0, 0, null);
}
-
+
public OrcNewSplit(OrcSplit inner) throws IOException {
super(inner.getPath(), inner.getStart(), inner.getLength(),
inner.getLocations());
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
index b0f8c8b..4dfa791 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
import java.util.List;
import java.util.Properties;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.TypeDescription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileSystem;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index fb7a6b2..0803df1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -23,11 +23,11 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.AcidStats;
-import org.apache.orc.impl.OrcAcidUtils;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.AcidStats;
+import org.apache.hive.orc.impl.OrcAcidUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index 1a1af28..758ae95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -25,9 +25,9 @@ import java.nio.charset.CharsetDecoder;
import java.util.ArrayList;
import java.util.List;
-import org.apache.orc.impl.AcidStats;
-import org.apache.orc.impl.OrcAcidUtils;
-import org.apache.orc.OrcConf;
+import org.apache.hive.orc.impl.AcidStats;
+import org.apache.hive.orc.impl.OrcAcidUtils;
+import org.apache.hive.orc.OrcConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -284,7 +284,7 @@ public class OrcRecordUpdater implements RecordUpdater {
return acidStats.inserts;
}
}
- //if we got here, we looked at all delta files in this txn, prior to current statement and didn't
+ //if we got here, we looked at all delta files in this txn, prior to current statement and didn't
//find any inserts...
return 0;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
index 59876e2..e3a6f05 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
@@ -23,7 +23,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Properties;
-import org.apache.orc.OrcConf;
+import org.apache.hive.orc.OrcConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
index d61b24b..f3acbb1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hive.ql.io.SyntheticFileId;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.FileSplit;
-import org.apache.orc.OrcProto;
-import org.apache.orc.impl.OrcTail;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
index d48cadd..7b9c569 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.io.Writable;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
import java.io.DataInput;
import java.io.DataOutput;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
index e36a744..936f171 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.UnionObject;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
import java.util.ArrayList;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java
index 8823e21..7439364 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
*
* One Reader can support multiple concurrent RecordReader.
*/
-public interface Reader extends org.apache.orc.Reader {
+public interface Reader extends org.apache.hive.orc.Reader {
/**
* Get the object inspector for looking at the objects.
@@ -55,7 +55,7 @@ public interface Reader extends org.apache.orc.Reader {
* @throws IOException
*/
RecordReader rowsOptions(Options options) throws IOException;
-
+
/**
* Create a RecordReader that will scan the entire file.
* This is a legacy method and rowsOptions is preferred.
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
index cbbbb15..9295480 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
@@ -26,11 +26,11 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public class ReaderImpl extends org.apache.orc.impl.ReaderImpl
+public class ReaderImpl extends org.apache.hive.orc.impl.ReaderImpl
implements Reader {
private static final Logger LOG = LoggerFactory.getLogger(ReaderImpl.class);
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java
index ff5612d..e3945a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
/**
* A row-by-row iterator for ORC files.
*/
-public interface RecordReader extends org.apache.orc.RecordReader {
+public interface RecordReader extends org.apache.hive.orc.RecordReader {
/**
* Does the reader have more rows available.
* @return true if there are more rows
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index e46ca51..cd19beb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -46,12 +46,12 @@ import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
+public class RecordReaderImpl extends org.apache.hive.orc.impl.RecordReaderImpl
implements RecordReader {
static final Logger LOG = LoggerFactory.getLogger(RecordReaderImpl.class);
private final VectorizedRowBatch batch;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java
index f7388a4..fcf7e09 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/VectorizedOrcInputFormat.java
@@ -42,9 +42,9 @@ import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.OrcProto;
-import org.apache.orc.OrcUtils;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.TypeDescription;
/**
* A MapReduce/Hive input format for ORC files.
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java
index 92f5ab8..a4c1bf1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java
@@ -23,7 +23,7 @@ import java.io.IOException;
/**
* The HIVE interface for writing ORC files.
*/
-public interface Writer extends org.apache.orc.Writer {
+public interface Writer extends org.apache.hive.orc.Writer {
/**
* Add a row to the end of the ORC file.
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 3e4ec2e..b4a167a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -60,7 +60,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspe
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
-import org.apache.orc.impl.PhysicalWriter;
+import org.apache.hive.orc.impl.PhysicalWriter;
import com.google.common.annotations.VisibleForTesting;
@@ -75,14 +75,14 @@ import com.google.common.annotations.VisibleForTesting;
*
* This class is unsynchronized like most Stream objects, so from the creation of an OrcFile and all
* access to a single instance has to be from a single thread.
- *
+ *
* There are no known cases where these happen between different threads today.
- *
+ *
* Caveat: the MemoryManager is created during WriterOptions create, that has to be confined to a single
* thread as well.
- *
+ *
*/
-public class WriterImpl extends org.apache.orc.impl.WriterImpl implements Writer {
+public class WriterImpl extends org.apache.hive.orc.impl.WriterImpl implements Writer {
private final ObjectInspector inspector;
private final VectorizedRowBatch internalBatch;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReader.java
index ea9904a..9bbc6d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReader.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.io.orc.encoded;
import java.io.IOException;
import java.util.List;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.StripeInformation;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
public interface EncodedReader {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
index 3e26d9a..5aaedce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
@@ -35,18 +35,18 @@ import org.apache.hadoop.hive.common.io.DataCache.DiskRangeListFactory;
import org.apache.hadoop.hive.common.io.DiskRangeList.CreateHelper;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData;
import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer;
-import org.apache.orc.CompressionCodec;
-import org.apache.orc.DataReader;
-import org.apache.orc.OrcConf;
-import org.apache.orc.impl.OutStream;
-import org.apache.orc.impl.RecordReaderUtils;
-import org.apache.orc.impl.StreamName;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.impl.BufferChunk;
+import org.apache.hive.orc.CompressionCodec;
+import org.apache.hive.orc.DataReader;
+import org.apache.hive.orc.OrcConf;
+import org.apache.hive.orc.impl.OutStream;
+import org.apache.hive.orc.impl.RecordReaderUtils;
+import org.apache.hive.orc.impl.StreamName;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.impl.BufferChunk;
import org.apache.hadoop.hive.llap.DebugUtils;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
/**
* Encoded reader implementation.
@@ -374,7 +374,7 @@ class EncodedReaderImpl implements EncodedReader {
if (sctx.stripeLevelStream == null) {
sctx.stripeLevelStream = POOLS.csdPool.take();
// We will be using this for each RG while also sending RGs to processing.
- // To avoid buffers being unlocked, run refcount one ahead; so each RG
+ // To avoid buffers being unlocked, run refcount one ahead; so each RG
// processing will decref once, and the
// last one will unlock the buffers.
sctx.stripeLevelStream.incRef();
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
index a6b11c9..117923d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.ql.io.orc.encoded;
-import org.apache.orc.impl.RunLengthByteReader;
+import org.apache.hive.orc.impl.RunLengthByteReader;
import java.io.IOException;
import java.util.List;
@@ -36,13 +36,13 @@ import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
-import org.apache.orc.CompressionCodec;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.TypeDescription.Category;
-import org.apache.orc.impl.PositionProvider;
-import org.apache.orc.impl.SettableUncompressedStream;
-import org.apache.orc.impl.TreeReaderFactory;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.CompressionCodec;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.TypeDescription.Category;
+import org.apache.hive.orc.impl.PositionProvider;
+import org.apache.hive.orc.impl.SettableUncompressedStream;
+import org.apache.hive.orc.impl.TreeReaderFactory;
+import org.apache.hive.orc.OrcProto;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/Reader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/Reader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/Reader.java
index 31b0609..2731065 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/Reader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/Reader.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.common.io.DataCache;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.orc.DataReader;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.DataReader;
+import org.apache.hive.orc.OrcProto;
/**
* The interface for reading encoded data from ORC files.
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/ReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/ReaderImpl.java
index 4856fb3..8893d9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/ReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/ReaderImpl.java
@@ -22,7 +22,7 @@ import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.io.DataCache;
-import org.apache.orc.DataReader;
+import org.apache.hive.orc.DataReader;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/StreamUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/StreamUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/StreamUtils.java
index 35be661..bec6c9a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/StreamUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/StreamUtils.java
@@ -24,8 +24,8 @@ import org.apache.hadoop.hive.common.DiskRangeInfo;
import org.apache.hadoop.hive.common.io.DiskRange;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData;
import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer;
-import org.apache.orc.impl.SettableUncompressedStream;
-import org.apache.orc.impl.BufferChunk;
+import org.apache.hive.orc.impl.SettableUncompressedStream;
+import org.apache.hive.orc.impl.BufferChunk;
import com.google.common.collect.Lists;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 7cd62f8..27bd934 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -121,9 +121,9 @@ import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;
-import org.apache.orc.OrcProto;
-import org.apache.orc.TypeDescription;
-import org.apache.orc.impl.PhysicalFsWriter;
+import org.apache.hive.orc.OrcProto;
+import org.apache.hive.orc.TypeDescription;
+import org.apache.hive.orc.impl.PhysicalFsWriter;
import org.junit.Before;
import org.junit.Rule;
@@ -3570,7 +3570,7 @@ public class TestInputOutputFormat {
OrcFile.writerOptions(conf)
.fileSystem(fs)
.setSchema(fileSchema)
- .compress(org.apache.orc.CompressionKind.NONE));
+ .compress(org.apache.hive.orc.CompressionKind.NONE));
VectorizedRowBatch batch = fileSchema.createRowBatch(1000);
batch.size = 1000;
LongColumnVector lcv = ((LongColumnVector) ((StructColumnVector) batch.cols[1]).fields[0]);
@@ -3648,7 +3648,7 @@ public class TestInputOutputFormat {
OrcFile.writerOptions(conf)
.fileSystem(fs)
.setSchema(fileSchema)
- .compress(org.apache.orc.CompressionKind.NONE));
+ .compress(org.apache.hive.orc.CompressionKind.NONE));
VectorizedRowBatch batch = fileSchema.createRowBatch(1000);
batch.size = 1000;
StructColumnVector scv = (StructColumnVector)batch.cols[5];
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index f9949df..ba05528 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -79,20 +79,20 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.HiveTestUtils;
-import org.apache.orc.BinaryColumnStatistics;
-import org.apache.orc.BooleanColumnStatistics;
-import org.apache.orc.ColumnStatistics;
-import org.apache.orc.DecimalColumnStatistics;
-import org.apache.orc.DoubleColumnStatistics;
-import org.apache.orc.IntegerColumnStatistics;
-import org.apache.orc.impl.MemoryManager;
-import org.apache.orc.OrcProto;
-
-import org.apache.orc.OrcUtils;
-import org.apache.orc.StringColumnStatistics;
-import org.apache.orc.StripeInformation;
-import org.apache.orc.StripeStatistics;
-import org.apache.orc.TypeDescription;
+import org.apache.hive.orc.BinaryColumnStatistics;
+import org.apache.hive.orc.BooleanColumnStatistics;
+import org.apache.hive.orc.ColumnStatistics;
+import org.apache.hive.orc.DecimalColumnStatistics;
+import org.apache.hive.orc.DoubleColumnStatistics;
+import org.apache.hive.orc.IntegerColumnStatistics;
+import org.apache.hive.orc.impl.MemoryManager;
+import org.apache.hive.orc.OrcProto;
+
+import org.apache.hive.orc.OrcUtils;
+import org.apache.hive.orc.StringColumnStatistics;
+import org.apache.hive.orc.StripeInformation;
+import org.apache.hive.orc.StripeStatistics;
+import org.apache.hive.orc.TypeDescription;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
index f07aa49..89ca282 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hive.ql.io.orc;
-import org.apache.orc.CompressionKind;
-import org.apache.orc.impl.MemoryManager;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.CompressionKind;
+import org.apache.hive.orc.impl.MemoryManager;
+import org.apache.hive.orc.StripeInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.OrcProto;
+import org.apache.hive.orc.OrcProto;
import org.junit.Test;
import org.mockito.MockSettings;
import org.mockito.Mockito;
@@ -1116,7 +1116,7 @@ public class TestOrcRawRecordMerger {
testRecordReaderIncompleteDelta(true);
}
/**
- *
+ *
* @param use130Format true means use delta_0001_0001_0000 format, else delta_0001_00001
*/
private void testRecordReaderIncompleteDelta(boolean use130Format) throws Exception {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java
index 67c473e..be6bd8b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRecordUpdater.java
@@ -41,8 +41,8 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.orc.impl.OrcAcidUtils;
-import org.apache.orc.tools.FileDump;
+import org.apache.hive.orc.impl.OrcAcidUtils;
+import org.apache.hive.orc.tools.FileDump;
import org.junit.Test;
public class TestOrcRecordUpdater {
http://git-wip-us.apache.org/repos/asf/hive/blob/fd1188a6/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
index 575a948..3d9e00d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
@@ -43,13 +43,13 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspe
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.HiveTestUtils;
-import org.apache.orc.BinaryColumnStatistics;
-import org.apache.orc.BooleanColumnStatistics;
-import org.apache.orc.ColumnStatistics;
-import org.apache.orc.DoubleColumnStatistics;
-import org.apache.orc.IntegerColumnStatistics;
-import org.apache.orc.StringColumnStatistics;
-import org.apache.orc.StripeInformation;
+import org.apache.hive.orc.BinaryColumnStatistics;
+import org.apache.hive.orc.BooleanColumnStatistics;
+import org.apache.hive.orc.ColumnStatistics;
+import org.apache.hive.orc.DoubleColumnStatistics;
+import org.apache.hive.orc.IntegerColumnStatistics;
+import org.apache.hive.orc.StringColumnStatistics;
+import org.apache.hive.orc.StripeInformation;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;