You are viewing a plain-text version of this content; the canonical link was provided in the original HTML message.
Posted to commits@hive.apache.org by om...@apache.org on 2013/04/19 17:52:20 UTC
svn commit: r1469919 - in /hive/branches/branch-0.11: ./
ql/src/java/org/apache/hadoop/hive/ql/io/orc/
ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
Author: omalley
Date: Fri Apr 19 15:52:19 2013
New Revision: 1469919
URL: http://svn.apache.org/r1469919
Log:
HIVE-4178 : ORC fails with files with different numbers of columns (Kevin Wilfong)
Added:
hive/branches/branch-0.11/ql/src/test/queries/clientpositive/orc_diff_part_cols.q
- copied unchanged from r1469908, hive/trunk/ql/src/test/queries/clientpositive/orc_diff_part_cols.q
hive/branches/branch-0.11/ql/src/test/queries/clientpositive/orc_empty_files.q
- copied unchanged from r1469908, hive/trunk/ql/src/test/queries/clientpositive/orc_empty_files.q
hive/branches/branch-0.11/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out
- copied unchanged from r1469908, hive/trunk/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out
hive/branches/branch-0.11/ql/src/test/results/clientpositive/orc_empty_files.q.out
- copied unchanged from r1469908, hive/trunk/ql/src/test/results/clientpositive/orc_empty_files.q.out
Modified:
hive/branches/branch-0.11/ (props changed)
hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
Propchange: hive/branches/branch-0.11/
------------------------------------------------------------------------------
svn:mergeinfo = /hive/trunk:1469908
Modified: hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java?rev=1469919&r1=1469918&r2=1469919&view=diff
==============================================================================
--- hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java (original)
+++ hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java Fri Apr 19 15:52:19 2013
@@ -17,6 +17,13 @@
*/
package org.apache.hadoop.hive.ql.io.orc;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -31,16 +38,9 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.io.Writable;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
final class OrcStruct implements Writable {
- private final Object[] fields;
+ private Object[] fields;
OrcStruct(int children) {
fields = new Object[children];
@@ -54,6 +54,14 @@ final class OrcStruct implements Writabl
fields[fieldIndex] = value;
}
+ public int getNumFields() {
+ return fields.length;
+ }
+
+ public void setNumFields(int numFields) {
+ fields = new Object[numFields];
+ }
+
@Override
public void write(DataOutput dataOutput) throws IOException {
throw new UnsupportedOperationException("write unsupported");
Modified: hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1469919&r1=1469918&r2=1469919&view=diff
==============================================================================
--- hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/branches/branch-0.11/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Fri Apr 19 15:52:19 2013
@@ -733,8 +733,9 @@ class RecordReaderImpl implements Record
} else {
length = dictionaryBuffer.size() - offset;
}
- // If the column is just empty strings, the size will be zero, so the buffer will be null,
- // in that case just return result as it will default to empty
+ // If the column is just empty strings, the size will be zero,
+ // so the buffer will be null, in that case just return result
+ // as it will default to empty
if (dictionaryBuffer != null) {
dictionaryBuffer.setText(result, offset, length);
} else {
@@ -788,6 +789,13 @@ class RecordReaderImpl implements Record
result = new OrcStruct(fields.length);
} else {
result = (OrcStruct) previous;
+
+ // If the input format was initialized with a file with a
+ // different number of fields, the number of fields needs to
+ // be updated to the correct number
+ if (result.getNumFields() != fields.length) {
+ result.setNumFields(fields.length);
+ }
}
for(int i=0; i < fields.length; ++i) {
if (fields[i] != null) {