Posted to commits@hive.apache.org by om...@apache.org on 2015/12/12 00:28:13 UTC

[16/16] hive git commit: HIVE-11890. Create ORC submodule. (omalley reviewed by prasanthj)

HIVE-11890. Create ORC submodule. (omalley reviewed by prasanthj)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9c7a78ee
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9c7a78ee
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9c7a78ee

Branch: refs/heads/master
Commit: 9c7a78ee36d43637237bc9af2df6dbd108f5c43e
Parents: 3e3d966
Author: Owen O'Malley <om...@apache.org>
Authored: Mon Nov 2 09:00:33 2015 -0800
Committer: Owen O'Malley <om...@apache.org>
Committed: Fri Dec 11 15:27:05 2015 -0800

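This patch carves the ORC reader/writer code out of ql into a new hive-orc module: public classes move from org.apache.hadoop.hive.ql.io.orc to org.apache.orc, internals to org.apache.orc.impl, and the generated OrcProto lands in the new module's src/gen tree. For callers, the change is essentially an import rewrite, as the hunks below show. A minimal sketch of the before/after (the OrcConsumer class is hypothetical; the package and class names are taken from the diff):

  // Old imports, removed throughout this patch:
  //   import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
  //   import org.apache.hadoop.hive.ql.io.orc.OrcProto;
  //   import org.apache.hadoop.hive.ql.io.orc.MetadataReader;
  // New locations:
  import org.apache.orc.CompressionKind;        // public API
  import org.apache.orc.OrcProto;               // generated protobuf
  import org.apache.orc.impl.MetadataReader;    // implementation detail

  public class OrcConsumer {
    // Nested protobuf types are now spelled through OrcProto:
    OrcProto.StripeFooter footer;
    CompressionKind compression = CompressionKind.ZLIB;
  }
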
----------------------------------------------------------------------
 common/pom.xml                                  |     5 +
 .../hadoop/hive/common/DiskRangeInfo.java       |    59 -
 .../hive/hcatalog/mapreduce/SpecialCases.java   |     2 +-
 .../llap/io/decode/OrcEncodedDataConsumer.java  |     4 +-
 .../llap/io/encoded/OrcEncodedDataReader.java   |    12 +-
 .../hive/llap/io/metadata/OrcFileMetadata.java  |    41 +-
 .../llap/io/metadata/OrcStripeMetadata.java     |    46 +-
 .../TestIncrementalObjectSizeEstimator.java     |    91 +-
 orc/pom.xml                                     |   136 +
 .../protobuf-java/org/apache/orc/OrcProto.java  | 19279 ++++++++++++++++
 .../org/apache/orc/BinaryColumnStatistics.java  |    27 +
 .../org/apache/orc/BooleanColumnStatistics.java |    29 +
 .../java/org/apache/orc/ColumnStatistics.java   |    36 +
 .../java/org/apache/orc/CompressionCodec.java   |    69 +
 .../java/org/apache/orc/CompressionKind.java    |    27 +
 orc/src/java/org/apache/orc/DataReader.java     |    58 +
 .../org/apache/orc/DateColumnStatistics.java    |    39 +
 .../org/apache/orc/DecimalColumnStatistics.java |    46 +
 .../org/apache/orc/DoubleColumnStatistics.java  |    46 +
 orc/src/java/org/apache/orc/FileMetaInfo.java   |    64 +
 orc/src/java/org/apache/orc/FileMetadata.java   |    64 +
 .../org/apache/orc/IntegerColumnStatistics.java |    52 +
 orc/src/java/org/apache/orc/OrcConf.java        |   191 +
 orc/src/java/org/apache/orc/OrcFile.java        |   514 +
 orc/src/java/org/apache/orc/OrcUtils.java       |   452 +
 orc/src/java/org/apache/orc/Reader.java         |   363 +
 orc/src/java/org/apache/orc/RecordReader.java   |    66 +
 .../org/apache/orc/StringColumnStatistics.java  |    43 +
 .../java/org/apache/orc/StripeInformation.java  |    59 +
 .../java/org/apache/orc/StripeStatistics.java   |    44 +
 .../apache/orc/TimestampColumnStatistics.java   |    38 +
 .../java/org/apache/orc/TypeDescription.java    |   540 +
 orc/src/java/org/apache/orc/Writer.java         |   114 +
 .../org/apache/orc/impl/BitFieldReader.java     |   216 +
 .../org/apache/orc/impl/BitFieldWriter.java     |    73 +
 .../java/org/apache/orc/impl/BufferChunk.java   |    85 +
 .../apache/orc/impl/ColumnStatisticsImpl.java   |  1097 +
 .../orc/impl/DirectDecompressionCodec.java      |    28 +
 .../org/apache/orc/impl/DynamicByteArray.java   |   303 +
 .../org/apache/orc/impl/DynamicIntArray.java    |   142 +
 .../java/org/apache/orc/impl/HadoopShims.java   |    64 +
 .../org/apache/orc/impl/HadoopShimsCurrent.java |    62 +
 .../org/apache/orc/impl/HadoopShims_2_2.java    |    36 +
 orc/src/java/org/apache/orc/impl/InStream.java  |   496 +
 .../java/org/apache/orc/impl/IntegerReader.java |    67 +
 .../java/org/apache/orc/impl/IntegerWriter.java |    47 +
 .../java/org/apache/orc/impl/MemoryManager.java |   214 +
 .../org/apache/orc/impl/MetadataReader.java     |    34 +
 .../org/apache/orc/impl/MetadataReaderImpl.java |   125 +
 orc/src/java/org/apache/orc/impl/OrcIndex.java  |    43 +
 orc/src/java/org/apache/orc/impl/OutStream.java |   289 +
 .../org/apache/orc/impl/PositionProvider.java   |    26 +
 .../org/apache/orc/impl/PositionRecorder.java   |    25 +
 .../apache/orc/impl/PositionedOutputStream.java |    39 +
 .../java/org/apache/orc/impl/RedBlackTree.java  |   311 +
 .../apache/orc/impl/RunLengthByteReader.java    |   150 +
 .../apache/orc/impl/RunLengthByteWriter.java    |   106 +
 .../apache/orc/impl/RunLengthIntegerReader.java |   157 +
 .../orc/impl/RunLengthIntegerReaderV2.java      |   390 +
 .../apache/orc/impl/RunLengthIntegerWriter.java |   143 +
 .../orc/impl/RunLengthIntegerWriterV2.java      |   831 +
 .../org/apache/orc/impl/SerializationUtils.java |  1297 ++
 .../orc/impl/SettableUncompressedStream.java    |    44 +
 .../java/org/apache/orc/impl/SnappyCodec.java   |   108 +
 .../java/org/apache/orc/impl/StreamName.java    |    97 +
 .../org/apache/orc/impl/StringRedBlackTree.java |   210 +
 orc/src/java/org/apache/orc/impl/ZlibCodec.java |   169 +
 orc/src/protobuf/orc_proto.proto                |   220 +
 .../org/apache/orc/impl/TestBitFieldReader.java |   145 +
 .../test/org/apache/orc/impl/TestBitPack.java   |   279 +
 .../org/apache/orc/impl/TestDynamicArray.java   |    90 +
 .../test/org/apache/orc/impl/TestInStream.java  |   314 +
 .../orc/impl/TestIntegerCompressionReader.java  |   130 +
 .../org/apache/orc/impl/TestMemoryManager.java  |   133 +
 .../orc/impl/TestRunLengthByteReader.java       |   143 +
 .../orc/impl/TestRunLengthIntegerReader.java    |   125 +
 .../apache/orc/impl/TestSerializationUtils.java |   164 +
 .../apache/orc/impl/TestStringRedBlackTree.java |   234 +
 orc/src/test/org/apache/orc/impl/TestZlib.java  |    56 +
 pom.xml                                         |     1 +
 ql/pom.xml                                      |    54 +-
 .../apache/hadoop/hive/ql/io/orc/OrcProto.java  | 19280 -----------------
 .../hive/ql/exec/OrcFileMergeOperator.java      |     4 +-
 .../hive/ql/io/filters/BloomFilterIO.java       |     2 +-
 .../hive/ql/io/orc/BinaryColumnStatistics.java  |    25 -
 .../hadoop/hive/ql/io/orc/BitFieldReader.java   |   212 -
 .../hadoop/hive/ql/io/orc/BitFieldWriter.java   |    69 -
 .../hive/ql/io/orc/BooleanColumnStatistics.java |    27 -
 .../hadoop/hive/ql/io/orc/ColumnStatistics.java |    36 -
 .../hive/ql/io/orc/ColumnStatisticsImpl.java    |  1082 -
 .../hadoop/hive/ql/io/orc/CompressionCodec.java |    69 -
 .../hadoop/hive/ql/io/orc/CompressionKind.java  |    18 +-
 .../hadoop/hive/ql/io/orc/DataReader.java       |    58 -
 .../hive/ql/io/orc/DateColumnStatistics.java    |    37 -
 .../hive/ql/io/orc/DecimalColumnStatistics.java |    45 -
 .../ql/io/orc/DirectDecompressionCodec.java     |    26 -
 .../hive/ql/io/orc/DoubleColumnStatistics.java  |    44 -
 .../hadoop/hive/ql/io/orc/DynamicByteArray.java |   303 -
 .../hadoop/hive/ql/io/orc/DynamicIntArray.java  |   142 -
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  |    20 +-
 .../hadoop/hive/ql/io/orc/FileMetaInfo.java     |    64 -
 .../hadoop/hive/ql/io/orc/FileMetadata.java     |    63 -
 .../apache/hadoop/hive/ql/io/orc/InStream.java  |   497 -
 .../hive/ql/io/orc/IntegerColumnStatistics.java |    50 -
 .../hadoop/hive/ql/io/orc/IntegerReader.java    |    67 -
 .../hadoop/hive/ql/io/orc/IntegerWriter.java    |    47 -
 .../hadoop/hive/ql/io/orc/JsonFileDump.java     |    16 +-
 .../hadoop/hive/ql/io/orc/MemoryManager.java    |   213 -
 .../hadoop/hive/ql/io/orc/MetadataReader.java   |    34 -
 .../hive/ql/io/orc/MetadataReaderImpl.java      |   123 -
 .../apache/hadoop/hive/ql/io/orc/OrcConf.java   |   191 -
 .../apache/hadoop/hive/ql/io/orc/OrcFile.java   |   364 +-
 .../hive/ql/io/orc/OrcFileKeyWrapper.java       |     2 +
 .../io/orc/OrcFileStripeMergeRecordReader.java  |     4 +-
 .../hive/ql/io/orc/OrcFileValueWrapper.java     |     2 +
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |   193 +-
 .../hive/ql/io/orc/OrcNewInputFormat.java       |     1 +
 .../hadoop/hive/ql/io/orc/OrcNewSplit.java      |     1 +
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java  |     5 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java      |     7 +-
 .../apache/hadoop/hive/ql/io/orc/OrcSerde.java  |     1 +
 .../apache/hadoop/hive/ql/io/orc/OrcSplit.java  |     2 +-
 .../apache/hadoop/hive/ql/io/orc/OrcStruct.java |     1 +
 .../apache/hadoop/hive/ql/io/orc/OrcUnion.java  |     1 +
 .../apache/hadoop/hive/ql/io/orc/OrcUtils.java  |   629 -
 .../apache/hadoop/hive/ql/io/orc/OutStream.java |   286 -
 .../hadoop/hive/ql/io/orc/PositionProvider.java |    26 -
 .../hadoop/hive/ql/io/orc/PositionRecorder.java |    25 -
 .../hive/ql/io/orc/PositionedOutputStream.java  |    38 -
 .../apache/hadoop/hive/ql/io/orc/Reader.java    |   319 +-
 .../hadoop/hive/ql/io/orc/ReaderImpl.java       |    50 +-
 .../hadoop/hive/ql/io/orc/RecordReader.java     |    38 +-
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java |   116 +-
 .../hive/ql/io/orc/RecordReaderUtils.java       |    12 +-
 .../hadoop/hive/ql/io/orc/RedBlackTree.java     |   309 -
 .../hive/ql/io/orc/RunLengthByteReader.java     |   150 -
 .../hive/ql/io/orc/RunLengthByteWriter.java     |   106 -
 .../hive/ql/io/orc/RunLengthIntegerReader.java  |   157 -
 .../ql/io/orc/RunLengthIntegerReaderV2.java     |   392 -
 .../hive/ql/io/orc/RunLengthIntegerWriter.java  |   143 -
 .../ql/io/orc/RunLengthIntegerWriterV2.java     |   831 -
 .../hadoop/hive/ql/io/orc/SchemaEvolution.java  |    12 +-
 .../hive/ql/io/orc/SerializationUtils.java      |  1291 --
 .../ql/io/orc/SettableUncompressedStream.java   |    43 -
 .../hadoop/hive/ql/io/orc/SnappyCodec.java      |   109 -
 .../hadoop/hive/ql/io/orc/StreamName.java       |    95 -
 .../hive/ql/io/orc/StringColumnStatistics.java  |    41 -
 .../hive/ql/io/orc/StringRedBlackTree.java      |   207 -
 .../hive/ql/io/orc/StripeInformation.java       |    59 -
 .../hadoop/hive/ql/io/orc/StripeStatistics.java |    42 -
 .../ql/io/orc/TimestampColumnStatistics.java    |    38 -
 .../hive/ql/io/orc/TreeReaderFactory.java       |    14 +-
 .../hadoop/hive/ql/io/orc/TypeDescription.java  |   540 -
 .../ql/io/orc/VectorizedOrcInputFormat.java     |     4 +-
 .../apache/hadoop/hive/ql/io/orc/Writer.java    |    88 +-
 .../hadoop/hive/ql/io/orc/WriterImpl.java       |    57 +-
 .../apache/hadoop/hive/ql/io/orc/ZlibCodec.java |   171 -
 .../hive/ql/io/orc/encoded/EncodedReader.java   |     9 +-
 .../ql/io/orc/encoded/EncodedReaderImpl.java    |    31 +-
 .../orc/encoded/EncodedTreeReaderFactory.java   |    99 +-
 .../hadoop/hive/ql/io/orc/encoded/Reader.java   |     6 +-
 .../hive/ql/io/orc/encoded/ReaderImpl.java      |     3 +-
 .../hive/ql/io/orc/encoded/StreamUtils.java     |     7 +-
 .../ppr/PartitionExpressionForMetastore.java    |     4 +-
 .../hadoop/hive/ql/io/orc/orc_proto.proto       |   220 -
 .../ql/exec/vector/util/OrcFileGenerator.java   |     2 +-
 .../hive/ql/io/orc/TestBitFieldReader.java      |   144 -
 .../hadoop/hive/ql/io/orc/TestBitPack.java      |   316 -
 .../hive/ql/io/orc/TestColumnStatistics.java    |    11 +-
 .../hadoop/hive/ql/io/orc/TestDynamicArray.java |    87 -
 .../hadoop/hive/ql/io/orc/TestInStream.java     |   313 -
 .../hive/ql/io/orc/TestInputOutputFormat.java   |     2 +
 .../ql/io/orc/TestIntegerCompressionReader.java |   129 -
 .../hadoop/hive/ql/io/orc/TestJsonFileDump.java |     1 +
 .../hive/ql/io/orc/TestMemoryManager.java       |   132 -
 .../hive/ql/io/orc/TestNewIntegerEncoding.java  |    10 +-
 .../hadoop/hive/ql/io/orc/TestOrcFile.java      |    63 +-
 .../hive/ql/io/orc/TestOrcNullOptimization.java |     7 +
 .../hive/ql/io/orc/TestOrcRawRecordMerger.java  |     9 +-
 .../hive/ql/io/orc/TestOrcSerDeStats.java       |     7 +
 .../hive/ql/io/orc/TestRecordReaderImpl.java    |     5 +-
 .../hive/ql/io/orc/TestRunLengthByteReader.java |   142 -
 .../ql/io/orc/TestRunLengthIntegerReader.java   |   124 -
 .../hive/ql/io/orc/TestSerializationUtils.java  |   164 -
 .../hadoop/hive/ql/io/orc/TestStreamName.java   |     2 +
 .../hive/ql/io/orc/TestStringDictionary.java    |     7 +-
 .../hive/ql/io/orc/TestStringRedBlackTree.java  |   288 -
 .../hive/ql/io/orc/TestTypeDescription.java     |     1 +
 .../hive/ql/io/orc/TestUnrolledBitPack.java     |     1 +
 .../hive/ql/io/orc/TestVectorOrcFile.java       |    38 +-
 .../hive/ql/io/orc/TestVectorizedORCReader.java |     1 -
 .../apache/hadoop/hive/ql/io/orc/TestZlib.java  |    55 -
 .../hadoop/hive/serde2/io/DateWritable.java     |   179 -
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |     9 -
 .../apache/hadoop/hive/shims/ZeroCopyShims.java |    42 -
 .../apache/hadoop/hive/shims/HadoopShims.java   |    13 -
 .../hadoop/hive/common/DiskRangeInfo.java       |    59 +
 .../hadoop/hive/serde2/io/DateWritable.java     |   179 +
 198 files changed, 32974 insertions(+), 32101 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index ee597cf..8141f75 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -44,6 +44,11 @@
       <artifactId>hive-storage-api</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-orc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <!-- inter-project -->
     <dependency>
       <groupId>commons-cli</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/common/src/java/org/apache/hadoop/hive/common/DiskRangeInfo.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/DiskRangeInfo.java b/common/src/java/org/apache/hadoop/hive/common/DiskRangeInfo.java
deleted file mode 100644
index 86b838c..0000000
--- a/common/src/java/org/apache/hadoop/hive/common/DiskRangeInfo.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.common;
-
-import java.util.List;
-
-import org.apache.hadoop.hive.common.io.DiskRange;
-
-import com.google.common.collect.Lists;
-
-/**
- * Disk range information class containing disk ranges and total length.
- */
-public class DiskRangeInfo {
-  List<DiskRange> diskRanges; // TODO: use DiskRangeList instead
-  long totalLength;
-
-  public DiskRangeInfo(int indexBaseOffset) {
-    this.diskRanges = Lists.newArrayList();
-    // Some data is missing from the stream for PPD uncompressed read (because index offset is
-    // relative to the entire stream and we only read part of stream if RGs are filtered; unlike
-    // with compressed data where PPD only filters CBs, so we always get full CB, and index offset
-    // is relative to CB). To take care of the case when UncompressedStream goes seeking around by
-    // its incorrect (relative to partial stream) index offset, we will increase the length by our
-    // offset-relative-to-the-stream, and also account for it in buffers (see createDiskRangeInfo).
-    // So, index offset now works; as long as noone seeks into this data before the RG (why would
-    // they), everything works. This is hacky... Stream shouldn't depend on having all the data.
-    this.totalLength = indexBaseOffset;
-  }
-
-  public void addDiskRange(DiskRange diskRange) {
-    diskRanges.add(diskRange);
-    totalLength += diskRange.getLength();
-  }
-
-  public List<DiskRange> getDiskRanges() {
-    return diskRanges;
-  }
-
-  public long getTotalLength() {
-    return totalLength;
-  }
-}
-

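DiskRangeInfo only moves in this patch — the diffstat shows the same 59-line file re-added elsewhere in the tree — so its behavior is unchanged: totalLength starts at the index base offset, per the long comment above, and grows by each appended range's length. A short usage sketch, assuming DiskRange's (offset, end) constructor from hive-storage-api:

  import org.apache.hadoop.hive.common.DiskRangeInfo;
  import org.apache.hadoop.hive.common.io.DiskRange;

  public class DiskRangeInfoSketch {
    public static void main(String[] args) {
      DiskRangeInfo info = new DiskRangeInfo(100);  // index base offset
      info.addDiskRange(new DiskRange(100, 150));   // length 50
      info.addDiskRange(new DiskRange(150, 300));   // length 150
      System.out.println(info.getTotalLength());    // 100 + 50 + 150 = 300
    }
  }
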
http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
index 756abf8..60af5c0 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcConf;
+import org.apache.orc.OrcConf;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
 import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;

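OrcConf keeps the same enum shape after the move (the 191-line file transfers intact per the diffstat), so call sites change only in the import line. A hedged sketch of reading a setting through the relocated class, assuming the COMPRESS entry and the getString(Configuration) accessor carried over:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.orc.OrcConf;

  public class OrcConfSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Falls back to the entry's built-in default when the key is unset.
      String codec = OrcConf.COMPRESS.getString(conf);
      System.out.println(codec);
    }
  }
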
http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index 5fd5226..2597848 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -28,16 +28,16 @@ import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
 import org.apache.hadoop.hive.llap.metrics.LlapDaemonQueueMetrics;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.io.orc.CompressionCodec;
+import org.apache.orc.CompressionCodec;
 import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer;
 import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedTreeReaderFactory;
 import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedTreeReaderFactory.SettableTreeReader;
 import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
 import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto;
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 import org.apache.hadoop.hive.ql.io.orc.TreeReaderFactory;
 import org.apache.hadoop.hive.ql.io.orc.WriterImpl;
+import org.apache.orc.OrcProto;
 
 public class OrcEncodedDataConsumer
   extends EncodedDataConsumer<OrcBatchKey, OrcEncodedColumnBatch> {

http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index f9d06e9..729f1bd 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -55,14 +55,13 @@ import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HdfsUtils;
-import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
-import org.apache.hadoop.hive.ql.io.orc.DataReader;
-import org.apache.hadoop.hive.ql.io.orc.MetadataReader;
+import org.apache.orc.CompressionKind;
+import org.apache.orc.DataReader;
+import org.apache.orc.impl.MetadataReader;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions;
-import org.apache.hadoop.hive.ql.io.orc.OrcConf;
+import org.apache.orc.OrcConf;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto;
 import org.apache.hadoop.hive.ql.io.orc.OrcSplit;
 import org.apache.hadoop.hive.ql.io.orc.encoded.Reader;
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
@@ -75,12 +74,13 @@ import org.apache.hadoop.hive.ql.io.orc.encoded.OrcCacheKey;
 import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
 import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.PoolFactory;
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderUtils;
-import org.apache.hadoop.hive.ql.io.orc.StripeInformation;
+import org.apache.orc.StripeInformation;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.common.util.FixedSizedObjectPool;
+import org.apache.orc.OrcProto;
 import org.apache.tez.common.CallableWithNdc;
 
 /**

http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
index 9ff3523..2e4e0c5 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.llap.io.metadata;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -27,19 +26,13 @@ import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator;
 import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator;
 import org.apache.hadoop.hive.llap.cache.EvictionDispatcher;
 import org.apache.hadoop.hive.llap.cache.LlapCacheableBuffer;
-import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl;
-import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
-import org.apache.hadoop.hive.ql.io.orc.FileMetadata;
+import org.apache.orc.CompressionKind;
+import org.apache.orc.FileMetadata;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.Type;
 import org.apache.hadoop.hive.ql.io.orc.Reader;
 import org.apache.hadoop.hive.ql.io.orc.ReaderImpl.StripeInformationImpl;
-import org.apache.hadoop.hive.ql.io.orc.StripeInformation;
+import org.apache.orc.StripeInformation;
+import org.apache.orc.OrcProto;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -50,9 +43,9 @@ import com.google.common.annotations.VisibleForTesting;
 public final class OrcFileMetadata extends LlapCacheableBuffer implements FileMetadata {
   private final List<StripeInformation> stripes;
   private final List<Integer> versionList;
-  private final List<StripeStatistics> stripeStats;
-  private final List<Type> types;
-  private final List<ColumnStatistics> fileStats;
+  private final List<OrcProto.StripeStatistics> stripeStats;
+  private final List<OrcProto.Type> types;
+  private final List<OrcProto.ColumnStatistics> fileStats;
   private final long fileId;
   private final CompressionKind compressionKind;
   private final int rowIndexStride;
@@ -80,17 +73,17 @@ public final class OrcFileMetadata extends LlapCacheableBuffer implements FileMe
     OrcFileMetadata ofm = new OrcFileMetadata(fileId);
     ofm.stripes.add(new StripeInformationImpl(
         OrcProto.StripeInformation.getDefaultInstance()));
-    ofm.fileStats.add(ColumnStatistics.getDefaultInstance());
-    ofm.stripeStats.add(StripeStatistics.newBuilder().addColStats(createStatsDummy()).build());
-    ofm.types.add(Type.newBuilder().addFieldNames("a").addSubtypes(0).build());
+    ofm.fileStats.add(OrcProto.ColumnStatistics.getDefaultInstance());
+    ofm.stripeStats.add(OrcProto.StripeStatistics.newBuilder().addColStats(createStatsDummy()).build());
+    ofm.types.add(OrcProto.Type.newBuilder().addFieldNames("a").addSubtypes(0).build());
     ofm.versionList.add(0);
     return ofm;
   }
 
-  static ColumnStatistics.Builder createStatsDummy() {
-    return ColumnStatistics.newBuilder().setBucketStatistics(
-            BucketStatistics.newBuilder().addCount(0)).setStringStatistics(
-            StringStatistics.newBuilder().setMaximum("zzz"));
+  static OrcProto.ColumnStatistics.Builder createStatsDummy() {
+    return OrcProto.ColumnStatistics.newBuilder().setBucketStatistics(
+            OrcProto.BucketStatistics.newBuilder().addCount(0)).setStringStatistics(
+            OrcProto.StringStatistics.newBuilder().setMaximum("zzz"));
   }
 
   // Ctor for memory estimation and tests
@@ -111,7 +104,7 @@ public final class OrcFileMetadata extends LlapCacheableBuffer implements FileMe
   public OrcFileMetadata(long fileId, Reader reader) {
     this.fileId = fileId;
     this.stripeStats = reader.getOrcProtoStripeStatistics();
-    this.compressionKind = reader.getCompression();
+    this.compressionKind = reader.getCompressionKind();
     this.compressionBufferSize = reader.getCompressionSize();
     this.stripes = reader.getStripes();
     this.isOriginalFormat = OrcInputFormat.isOriginal(reader);
@@ -210,7 +203,7 @@ public final class OrcFileMetadata extends LlapCacheableBuffer implements FileMe
   }
 
   @Override
-  public List<StripeStatistics> getStripeStats() {
+  public List<OrcProto.StripeStatistics> getStripeStats() {
     return stripeStats;
   }
 
@@ -225,7 +218,7 @@ public final class OrcFileMetadata extends LlapCacheableBuffer implements FileMe
   }
 
   @Override
-  public List<ColumnStatistics> getFileStats() {
+  public List<OrcProto.ColumnStatistics> getFileStats() {
     return fileStats;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
index d70ff1a..8479d22 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcStripeMetadata.java
@@ -26,26 +26,20 @@ import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator;
 import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator;
 import org.apache.hadoop.hive.llap.cache.EvictionDispatcher;
 import org.apache.hadoop.hive.llap.cache.LlapCacheableBuffer;
-import org.apache.hadoop.hive.ql.io.orc.MetadataReader;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BloomFilter;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BloomFilterIndex;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter;
-import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
-import org.apache.hadoop.hive.ql.io.orc.StripeInformation;
+import org.apache.orc.impl.MetadataReader;
+import org.apache.orc.impl.OrcIndex;
+import org.apache.orc.StripeInformation;
 import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
+import org.apache.orc.OrcProto;
 
 import com.google.common.annotations.VisibleForTesting;
 
 public class OrcStripeMetadata extends LlapCacheableBuffer {
   private final OrcBatchKey stripeKey;
-  private final List<ColumnEncoding> encodings;
-  private final List<Stream> streams;
+  private final List<OrcProto.ColumnEncoding> encodings;
+  private final List<OrcProto.Stream> streams;
   private final long rowCount;
-  private RecordReaderImpl.Index rowIndex;
+  private OrcIndex rowIndex;
 
   private final int estimatedMemUsage;
 
@@ -62,7 +56,7 @@ public class OrcStripeMetadata extends LlapCacheableBuffer {
   public OrcStripeMetadata(OrcBatchKey stripeKey, MetadataReader mr, StripeInformation stripe,
       boolean[] includes, boolean[] sargColumns) throws IOException {
     this.stripeKey = stripeKey;
-    StripeFooter footer = mr.readStripeFooter(stripe);
+    OrcProto.StripeFooter footer = mr.readStripeFooter(stripe);
     streams = footer.getStreamsList();
     encodings = footer.getColumnsList();
     rowCount = stripe.getNumberOfRows();
@@ -81,15 +75,15 @@ public class OrcStripeMetadata extends LlapCacheableBuffer {
   @VisibleForTesting
   public static OrcStripeMetadata createDummy(long id) {
     OrcStripeMetadata dummy = new OrcStripeMetadata(id);
-    dummy.encodings.add(ColumnEncoding.getDefaultInstance());
-    dummy.streams.add(Stream.getDefaultInstance());
-    RowIndex ri = RowIndex.newBuilder().addEntry(
-        RowIndexEntry.newBuilder().addPositions(1).setStatistics(
+    dummy.encodings.add(OrcProto.ColumnEncoding.getDefaultInstance());
+    dummy.streams.add(OrcProto.Stream.getDefaultInstance());
+    OrcProto.RowIndex ri = OrcProto.RowIndex.newBuilder().addEntry(
+        OrcProto.RowIndexEntry.newBuilder().addPositions(1).setStatistics(
             OrcFileMetadata.createStatsDummy())).build();
-    BloomFilterIndex bfi = BloomFilterIndex.newBuilder().addBloomFilter(
-        BloomFilter.newBuilder().addBitset(0)).build();
-    dummy.rowIndex = new RecordReaderImpl.Index(
-        new RowIndex[] { ri }, new BloomFilterIndex[] { bfi });
+    OrcProto.BloomFilterIndex bfi = OrcProto.BloomFilterIndex.newBuilder().addBloomFilter(
+        OrcProto.BloomFilter.newBuilder().addBitset(0)).build();
+    dummy.rowIndex = new OrcIndex(
+        new OrcProto.RowIndex[] { ri }, new OrcProto.BloomFilterIndex[] { bfi });
     return dummy;
   }
 
@@ -112,19 +106,19 @@ public class OrcStripeMetadata extends LlapCacheableBuffer {
     return stripeKey.stripeIx;
   }
 
-  public RowIndex[] getRowIndexes() {
+  public OrcProto.RowIndex[] getRowIndexes() {
     return rowIndex.getRowGroupIndex();
   }
 
-  public BloomFilterIndex[] getBloomFilterIndexes() {
+  public OrcProto.BloomFilterIndex[] getBloomFilterIndexes() {
     return rowIndex.getBloomFilterIndex();
   }
 
-  public List<ColumnEncoding> getEncodings() {
+  public List<OrcProto.ColumnEncoding> getEncodings() {
     return encodings;
   }
 
-  public List<Stream> getStreams() {
+  public List<OrcProto.Stream> getStreams() {
     return streams;
   }
 

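OrcIndex replaces the nested RecordReaderImpl.Index as a plain holder pairing the row-group indexes with the bloom filter indexes. A minimal construction sketch using only the constructor and getters visible in this hunk:

  import org.apache.orc.OrcProto;
  import org.apache.orc.impl.OrcIndex;

  public class OrcIndexSketch {
    public static void main(String[] args) {
      OrcProto.RowIndex ri = OrcProto.RowIndex.newBuilder()
          .addEntry(OrcProto.RowIndexEntry.newBuilder().addPositions(1))
          .build();
      OrcProto.BloomFilterIndex bfi = OrcProto.BloomFilterIndex.newBuilder()
          .addBloomFilter(OrcProto.BloomFilter.newBuilder().addBitset(0))
          .build();
      // Pair the two index arrays, then read them back through the getters.
      OrcIndex index = new OrcIndex(
          new OrcProto.RowIndex[] { ri }, new OrcProto.BloomFilterIndex[] { bfi });
      System.out.println(index.getRowGroupIndex().length);    // 1
      System.out.println(index.getBloomFilterIndex().length); // 1
    }
  }
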
http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
index 7be82c2..a078f73 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java
@@ -32,29 +32,12 @@ import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator;
 import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator;
 import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata;
 import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
-import org.apache.hadoop.hive.ql.io.orc.MetadataReader;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BinaryStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BloomFilter;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BloomFilterIndex;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnEncoding.Kind;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.DateStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.DecimalStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndex;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.RowIndexEntry;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.Stream;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.StripeFooter;
-import org.apache.hadoop.hive.ql.io.orc.OrcProto.TimestampStatistics;
-import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
-import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl.Index;
-import org.apache.hadoop.hive.ql.io.orc.StripeInformation;
+import org.apache.orc.impl.MetadataReader;
+import org.apache.orc.impl.OrcIndex;
+import org.apache.orc.StripeInformation;
 import org.apache.hadoop.hive.ql.io.orc.encoded.OrcBatchKey;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.orc.OrcProto;
 import org.junit.Test;
 import org.mockito.Mockito;
 
@@ -68,66 +51,68 @@ public class TestIncrementalObjectSizeEstimator {
     public boolean isEmpty;
 
     @Override
-    public Index readRowIndex(StripeInformation stripe, StripeFooter footer,
-        boolean[] included, RowIndex[] indexes, boolean[] sargColumns,
-        BloomFilterIndex[] bloomFilterIndices) throws IOException {
+    public OrcIndex readRowIndex(StripeInformation stripe,
+                              OrcProto.StripeFooter footer,
+        boolean[] included, OrcProto.RowIndex[] indexes, boolean[] sargColumns,
+        OrcProto.BloomFilterIndex[] bloomFilterIndices) throws IOException {
       if (isEmpty) {
-        return new RecordReaderImpl.Index(new RowIndex[] { }, new BloomFilterIndex[] { });
+        return new OrcIndex(new OrcProto.RowIndex[] { },
+            new OrcProto.BloomFilterIndex[] { });
       }
-      ColumnStatistics cs = ColumnStatistics.newBuilder()
-          .setBucketStatistics(BucketStatistics.newBuilder().addCount(0))
-          .setStringStatistics(StringStatistics.newBuilder().setMaximum("zzz").setMinimum("aaa"))
-          .setBinaryStatistics(BinaryStatistics.newBuilder().setSum(5))
-          .setDateStatistics(DateStatistics.newBuilder().setMinimum(4545).setMaximum(6656))
-          .setDecimalStatistics(DecimalStatistics.newBuilder().setMaximum("zzz").setMinimum("aaa"))
-          .setDoubleStatistics(DoubleStatistics.newBuilder().setMinimum(0.5).setMaximum(1.5))
-          .setIntStatistics(IntegerStatistics.newBuilder().setMaximum(10).setMinimum(5))
-          .setTimestampStatistics(TimestampStatistics.newBuilder().setMaximum(10)).build();
-      RowIndex ri = RowIndex.newBuilder()
-          .addEntry(RowIndexEntry.newBuilder().addPositions(1))
-          .addEntry(RowIndexEntry.newBuilder().addPositions(0).addPositions(2).setStatistics(cs))
+      OrcProto.ColumnStatistics cs = OrcProto.ColumnStatistics.newBuilder()
+          .setBucketStatistics(OrcProto.BucketStatistics.newBuilder().addCount(0))
+          .setStringStatistics(OrcProto.StringStatistics.newBuilder().setMaximum("zzz").setMinimum("aaa"))
+          .setBinaryStatistics(OrcProto.BinaryStatistics.newBuilder().setSum(5))
+          .setDateStatistics(OrcProto.DateStatistics.newBuilder().setMinimum(4545).setMaximum(6656))
+          .setDecimalStatistics(OrcProto.DecimalStatistics.newBuilder().setMaximum("zzz").setMinimum("aaa"))
+          .setDoubleStatistics(OrcProto.DoubleStatistics.newBuilder().setMinimum(0.5).setMaximum(1.5))
+          .setIntStatistics(OrcProto.IntegerStatistics.newBuilder().setMaximum(10).setMinimum(5))
+          .setTimestampStatistics(OrcProto.TimestampStatistics.newBuilder().setMaximum(10)).build();
+      OrcProto.RowIndex ri = OrcProto.RowIndex.newBuilder()
+          .addEntry(OrcProto.RowIndexEntry.newBuilder().addPositions(1))
+          .addEntry(OrcProto.RowIndexEntry.newBuilder().addPositions(0).addPositions(2).setStatistics(cs))
           .build();
-      RowIndex ri2 = RowIndex.newBuilder()
-          .addEntry(RowIndexEntry.newBuilder().addPositions(3))
+      OrcProto.RowIndex ri2 = OrcProto.RowIndex.newBuilder()
+          .addEntry(OrcProto.RowIndexEntry.newBuilder().addPositions(3))
           .build();
-      BloomFilterIndex bfi = BloomFilterIndex.newBuilder().addBloomFilter(
-          BloomFilter.newBuilder().addBitset(0).addBitset(1)).build();
+      OrcProto.BloomFilterIndex bfi = OrcProto.BloomFilterIndex.newBuilder().addBloomFilter(
+          OrcProto.BloomFilter.newBuilder().addBitset(0).addBitset(1)).build();
       if (doStreamStep) {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         CodedOutputStream cos = CodedOutputStream.newInstance(baos);
         ri.writeTo(cos);
         cos.flush();
-        ri = RowIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
+        ri = OrcProto.RowIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
         baos = new ByteArrayOutputStream();
         cos = CodedOutputStream.newInstance(baos);
         ri2.writeTo(cos);
         cos.flush();
-        ri2 = RowIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
+        ri2 = OrcProto.RowIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
         baos = new ByteArrayOutputStream();
         cos = CodedOutputStream.newInstance(baos);
         bfi.writeTo(cos);
         cos.flush();
-        bfi = BloomFilterIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
+        bfi = OrcProto.BloomFilterIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
       }
-      return new RecordReaderImpl.Index(
-          new RowIndex[] { ri, ri2 }, new BloomFilterIndex[] { bfi });
+      return new OrcIndex(
+          new OrcProto.RowIndex[] { ri, ri2 }, new OrcProto.BloomFilterIndex[] { bfi });
     }
 
     @Override
-    public StripeFooter readStripeFooter(StripeInformation stripe) throws IOException {
-      StripeFooter.Builder fb = StripeFooter.newBuilder();
+    public OrcProto.StripeFooter readStripeFooter(StripeInformation stripe) throws IOException {
+      OrcProto.StripeFooter.Builder fb = OrcProto.StripeFooter.newBuilder();
       if (!isEmpty) {
-        fb.addStreams(Stream.newBuilder().setColumn(0).setLength(20).setKind(Stream.Kind.LENGTH))
-          .addStreams(Stream.newBuilder().setColumn(0).setLength(40).setKind(Stream.Kind.DATA))
-          .addColumns(ColumnEncoding.newBuilder().setDictionarySize(10).setKind(Kind.DIRECT_V2));
+        fb.addStreams(OrcProto.Stream.newBuilder().setColumn(0).setLength(20).setKind(OrcProto.Stream.Kind.LENGTH))
+          .addStreams(OrcProto.Stream.newBuilder().setColumn(0).setLength(40).setKind(OrcProto.Stream.Kind.DATA))
+          .addColumns(OrcProto.ColumnEncoding.newBuilder().setDictionarySize(10).setKind(OrcProto.ColumnEncoding.Kind.DIRECT_V2));
       }
-      StripeFooter footer = fb.build();
+      OrcProto.StripeFooter footer = fb.build();
       if (doStreamStep) {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         CodedOutputStream cos = CodedOutputStream.newInstance(baos);
         footer.writeTo(cos);
         cos.flush();
-        footer = StripeFooter.newBuilder().mergeFrom(baos.toByteArray()).build();
+        footer = OrcProto.StripeFooter.newBuilder().mergeFrom(baos.toByteArray()).build();
       }
       return footer;
     }

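The doStreamStep branch above round-trips each message through its protobuf wire format so the size estimator measures parsed objects rather than builder output. The same round-trip in isolation, using the CodedOutputStream/mergeFrom calls from the hunk:

  import java.io.ByteArrayOutputStream;
  import com.google.protobuf.CodedOutputStream;
  import org.apache.orc.OrcProto;

  public class ProtoRoundTrip {
    public static void main(String[] args) throws Exception {
      OrcProto.RowIndex ri = OrcProto.RowIndex.newBuilder()
          .addEntry(OrcProto.RowIndexEntry.newBuilder().addPositions(1))
          .build();
      // Serialize to bytes, then parse back into a fresh message.
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      CodedOutputStream cos = CodedOutputStream.newInstance(baos);
      ri.writeTo(cos);
      cos.flush();
      ri = OrcProto.RowIndex.newBuilder().mergeFrom(baos.toByteArray()).build();
      System.out.println(ri.getEntryCount()); // 1
    }
  }
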
http://git-wip-us.apache.org/repos/asf/hive/blob/9c7a78ee/orc/pom.xml
----------------------------------------------------------------------
diff --git a/orc/pom.xml b/orc/pom.xml
new file mode 100644
index 0000000..fef23e8
--- /dev/null
+++ b/orc/pom.xml
@@ -0,0 +1,136 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>2.1.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-orc</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive ORC</name>
+
+  <properties>
+    <hive.path.to.root>..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-storage-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <!-- inter-project -->
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>${protobuf.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.iq80.snappy</groupId>
+      <artifactId>snappy</artifactId>
+      <version>${snappy.version}</version>
+    </dependency>
+
+    <!-- test inter-project -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${mockito-all.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>protobuf</id>
+      <build>
+        <plugins>
+	  <plugin>
+            <groupId>com.github.os72</groupId>
+            <artifactId>protoc-jar-maven-plugin</artifactId>
+            <version>3.0.0-a3</version>
+            <executions>
+              <execution>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+                <configuration>
+                  <protocVersion>2.5.0</protocVersion>
+                  <addSources>none</addSources>
+                  <outputDirectory>src/gen/protobuf-java</outputDirectory>
+                  <includeDirectories>
+                    <include>src/protobuf</include>
+                  </includeDirectories>
+                  <inputDirectories>
+                    <include>src/protobuf</include>
+                  </inputDirectories>
+                </configuration>
+              </execution>
+            </executions>
+	  </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <build>
+    <sourceDirectory>${basedir}/src/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+    <testResources>
+      <testResource>
+        <directory>${basedir}/src/test/resources</directory>
+      </testResource>
+    </testResources>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>src/gen/protobuf-java</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>