You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by go...@apache.org on 2015/05/29 02:51:06 UTC
[59/82] [abbrv] hive git commit: HIVE-9605: Remove parquet nested
objects from wrapper writable objects (Sergio Pena, reviewed by Ferdinand Xu)
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
index f349aa0..8e386c1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
@@ -20,11 +20,7 @@ package org.apache.hadoop.hive.ql.io.parquet;
import java.util.List;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.io.ArrayWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
import org.junit.Assert;
import org.junit.Test;
import parquet.io.api.RecordConsumer;
@@ -62,10 +58,10 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+ ObjectArrayWritable expected = list(
+ new Integer(34), new Integer(35), new Integer(36));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -109,11 +105,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new FloatWritable(1.0f), new FloatWritable(1.0f)),
- record(new FloatWritable(2.0f), new FloatWritable(2.0f))));
+ ObjectArrayWritable expected = record(list(
+ record(new Float(1.0f), new Float(1.0f)),
+ record(new Float(2.0f), new Float(2.0f))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -148,10 +144,10 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+ ObjectArrayWritable expected = record(list(
+ new Integer(34), new Integer(35), new Integer(36)));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record", expected, records.get(0));
}
@@ -198,11 +194,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new LongWritable(1234L)),
- record(new LongWritable(2345L))));
+ ObjectArrayWritable expected = record(list(
+ record(new Long(1234L)),
+ record(new Long(2345L))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -237,10 +233,10 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+ ObjectArrayWritable expected = record(list(
+ new Integer(34), new Integer(35), new Integer(36)));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record", expected, records.get(0));
}
@@ -287,11 +283,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new LongWritable(1234L)),
- record(new LongWritable(2345L))));
+ ObjectArrayWritable expected = record(list(
+ record(new Long(1234L)),
+ record(new Long(2345L))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -339,11 +335,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- new LongWritable(1234L),
- new LongWritable(2345L)));
+ ObjectArrayWritable expected = record(list(
+ new Long(1234L),
+ new Long(2345L)));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -397,11 +393,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
- record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
+ ObjectArrayWritable expected = record(list(
+ record(new Double(0.0), new Double(0.0)),
+ record(new Double(0.0), new Double(180.0))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -473,12 +469,12 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
+ ObjectArrayWritable expected = record(list(
+ record(new Double(0.0), new Double(0.0)),
null,
- record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
+ record(new Double(0.0), new Double(180.0))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -546,11 +542,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
- record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
+ ObjectArrayWritable expected = record(list(
+ record(new Double(0.0), new Double(180.0)),
+ record(new Double(0.0), new Double(0.0))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -619,11 +615,11 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
- record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
- record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
+ ObjectArrayWritable expected = record(list(
+ record(new Double(0.0), new Double(180.0)),
+ record(new Double(0.0), new Double(0.0))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
index 8f03c5b..5f586e4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
@@ -14,25 +14,18 @@
package org.apache.hadoop.hive.ql.io.parquet;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ObjectArrayWritableObjectInspector;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter;
-import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
+import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.ArrayWritable;
-import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import org.junit.Before;
import org.junit.Test;
@@ -114,38 +107,38 @@ public class TestDataWritableWriter {
private Writable createNull() { return null; }
- private ByteWritable createTinyInt(byte value) { return new ByteWritable(value); }
+ private byte createTinyInt(byte value) { return value; }
- private ShortWritable createSmallInt(short value) { return new ShortWritable(value); }
+ private short createSmallInt(short value) { return value; }
- private LongWritable createBigInt(long value) { return new LongWritable(value); }
+ private long createBigInt(long value) { return value; }
- private IntWritable createInt(int value) {
- return new IntWritable(value);
+ private int createInt(int value) {
+ return value;
}
- private FloatWritable createFloat(float value) {
- return new FloatWritable(value);
+ private float createFloat(float value) {
+ return value;
}
- private DoubleWritable createDouble(double value) {
- return new DoubleWritable(value);
+ private double createDouble(double value) {
+ return value;
}
- private BooleanWritable createBoolean(boolean value) {
- return new BooleanWritable(value);
+ private boolean createBoolean(boolean value) {
+ return value;
}
private BytesWritable createString(String value) throws UnsupportedEncodingException {
return new BytesWritable(value.getBytes("UTF-8"));
}
- private ArrayWritable createGroup(Writable...values) {
- return new ArrayWritable(Writable.class, values);
+ private ObjectArrayWritable createGroup(Object...values) {
+ return new ObjectArrayWritable(values);
}
- private ArrayWritable createArray(Writable...values) {
- return new ArrayWritable(Writable.class, createGroup(values).get());
+ private ObjectArrayWritable createArray(Object...values) {
+ return new ObjectArrayWritable(createGroup(values).get());
}
private List<String> createHiveColumnsFrom(final String columnNamesStr) {
@@ -171,15 +164,15 @@ public class TestDataWritableWriter {
return columnTypes;
}
- private ArrayWritableObjectInspector getObjectInspector(final String columnNames, final String columnTypes) {
+ private ObjectArrayWritableObjectInspector getObjectInspector(final String columnNames, final String columnTypes) {
List<TypeInfo> columnTypeList = createHiveTypeInfoFrom(columnTypes);
List<String> columnNameList = createHiveColumnsFrom(columnNames);
StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNameList, columnTypeList);
- return new ArrayWritableObjectInspector(rowTypeInfo);
+ return new ObjectArrayWritableObjectInspector(rowTypeInfo);
}
- private ParquetHiveRecord getParquetWritable(String columnNames, String columnTypes, ArrayWritable record) throws SerDeException {
+ private ParquetHiveRecord getParquetWritable(String columnNames, String columnTypes, ObjectArrayWritable record) throws SerDeException {
Properties recordProperties = new Properties();
recordProperties.setProperty("columns", columnNames);
recordProperties.setProperty("columns.types", columnTypes);
@@ -212,7 +205,7 @@ public class TestDataWritableWriter {
+ " optional int64 bigint;\n"
+ "}\n";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createInt(1),
createDouble(1.0),
createBoolean(true),
@@ -268,7 +261,7 @@ public class TestDataWritableWriter {
+ " }\n"
+ "}\n";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createGroup(
createInt(1),
createDouble(1.0),
@@ -310,7 +303,7 @@ public class TestDataWritableWriter {
+ " }\n"
+ "}\n";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createGroup(
createArray(
createInt(1),
@@ -360,7 +353,7 @@ public class TestDataWritableWriter {
+ " }\n"
+ "}\n";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createGroup(
createArray(
createArray(
@@ -431,7 +424,7 @@ public class TestDataWritableWriter {
+ " }\n"
+ "}\n";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createGroup(
createArray(
createGroup(
@@ -481,7 +474,7 @@ public class TestDataWritableWriter {
String columnNames = "structCol";
String columnTypes = "int";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createInt(1)
);
@@ -504,7 +497,7 @@ public class TestDataWritableWriter {
String columnNames = "arrayCol";
String columnTypes = "int";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createInt(1)
);
@@ -529,7 +522,7 @@ public class TestDataWritableWriter {
String columnNames = "mapCol";
String columnTypes = "int";
- ArrayWritable hiveRecord = createGroup(
+ ObjectArrayWritable hiveRecord = createGroup(
createInt(1)
);
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
index 6fc83e0..436569a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
@@ -22,7 +22,7 @@ import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.io.ArrayWritable;
+import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.junit.Assert;
@@ -82,11 +82,11 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
+ ObjectArrayWritable expected = record(list(
record(new Text("lettuce"), new IntWritable(34)),
record(new Text("cabbage"), new IntWritable(18))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -149,12 +149,12 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
+ ObjectArrayWritable expected = record(list(
record(new Text("lettuce"), new IntWritable(34)),
record(new Text("kale"), null),
record(new Text("cabbage"), new IntWritable(18))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -230,11 +230,11 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
+ ObjectArrayWritable expected = record(list(
record(new Text("green"), list(new Text("lettuce"), new Text("kale"), null)),
record(new Text("brown"), null)));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -325,11 +325,11 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
+ ObjectArrayWritable expected = record(list(
record(new Text("low"), list(new IntWritable(34), new IntWritable(35), null)),
record(new Text("high"), list(new IntWritable(340), new IntWritable(360)))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -386,11 +386,11 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(record(
+ ObjectArrayWritable expected = record(list(record(
record(new IntWritable(7), new IntWritable(22)),
new DoubleWritable(3.14))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -447,11 +447,11 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(record(
+ ObjectArrayWritable expected = record(list(record(
new DoubleWritable(3.14),
record(new IntWritable(7), new IntWritable(22)))));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
@@ -542,7 +542,7 @@ public class TestMapStructures extends AbstractTestParquetDirect {
}
});
- ArrayWritable expected = record(list(
+ ObjectArrayWritable expected = record(list(
record(new Text("a"), list(
record(new Text("b"), new IntWritable(1)))),
record(new Text("b"), list(
@@ -550,7 +550,7 @@ public class TestMapStructures extends AbstractTestParquetDirect {
record(new Text("b"), new IntWritable(-2))))
));
- List<ArrayWritable> records = read(test);
+ List<ObjectArrayWritable> records = read(test);
Assert.assertEquals("Should have only one record", 1, records.size());
assertEquals("Should match expected record",
expected, records.get(0));
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
index 1a54bf5..1b98cd3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
@@ -15,7 +15,7 @@ package org.apache.hadoop.hive.ql.io.parquet;
import static org.mockito.Mockito.mock;
-import org.apache.hadoop.io.ArrayWritable;
+import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
import org.junit.Test;
import parquet.hadoop.ParquetInputFormat;
@@ -30,7 +30,7 @@ public class TestMapredParquetInputFormat {
@Test
public void testConstructorWithParquetInputFormat() {
new MapredParquetInputFormat(
- (ParquetInputFormat<ArrayWritable>) mock(ParquetInputFormat.class)
+ (ParquetInputFormat<ObjectArrayWritable>) mock(ParquetInputFormat.class)
);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
index 4ccb207..5f58d4f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
@@ -22,7 +22,7 @@ import com.google.common.collect.Lists;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper;
-import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ObjectArrayWritableObjectInspector;
import org.apache.hadoop.hive.ql.plan.*;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
@@ -119,12 +119,12 @@ public class TestParquetRowGroupFilter extends AbstractTestParquetDirect {
Assert.assertEquals("row group is not filtered correctly", 0, recordReader.getFiltedBlocks().size());
}
- private ArrayWritableObjectInspector getObjectInspector(final String columnNames, final String columnTypes) {
+ private ObjectArrayWritableObjectInspector getObjectInspector(final String columnNames, final String columnTypes) {
List<TypeInfo> columnTypeList = createHiveTypeInfoFrom(columnTypes);
List<String> columnNameList = createHiveColumnsFrom(columnNames);
StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNameList, columnTypeList);
- return new ArrayWritableObjectInspector(rowTypeInfo);
+ return new ObjectArrayWritableObjectInspector(rowTypeInfo);
}
private List<String> createHiveColumnsFrom(final String columnNamesStr) {
http://git-wip-us.apache.org/repos/asf/hive/blob/4157374d/serde/src/java/org/apache/hadoop/hive/serde2/io/ObjectArrayWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/ObjectArrayWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/ObjectArrayWritable.java
new file mode 100644
index 0000000..84faa3c
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/ObjectArrayWritable.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+/**
+ * This class is a container for an array of objects of any type. It implements
+ * the Writable interface so that it can pass generic objects read from InputFormat
+ * implementations up to Hive object inspectors. This class helps storage formats
+ * avoid unnecessary Writable object allocations, as the only Writable class needed
+ * for map/reduce functions is this array of objects.
+ *
+ * This is the replacement for the ArrayWritable class, which can contain only Writable objects.
+ */
+public class ObjectArrayWritable implements Writable {
+ private Object[] values;
+
+ public ObjectArrayWritable(final Object[] values) {
+ this.values = values;
+ }
+
+ public Object[] get() {
+ return values;
+ }
+
+ @Override
+ public void write(DataOutput dataOutput) throws IOException {
+ throw new UnsupportedOperationException("Unsupported method call.");
+ }
+
+ @Override
+ public void readFields(DataInput dataInput) throws IOException {
+ throw new UnsupportedOperationException("Unsupported method call.");
+ }
+}