Posted to commits@hive.apache.org by ha...@apache.org on 2014/09/02 18:30:34 UTC
svn commit: r1622060 [2/3] - in /hive/trunk:
beeline/src/java/org/apache/hive/beeline/
common/src/java/org/apache/hadoop/hive/common/
common/src/java/org/apache/hadoop/hive/conf/
hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ hc...
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java Tue Sep 2 16:30:33 2014
@@ -82,7 +82,7 @@ public final class VectorExpressionWrite
this.objectInspector = objectInspector;
return this;
}
-
+
/**
* The base implementation must be overridden by the Long specialization
*/
@@ -90,7 +90,7 @@ public final class VectorExpressionWrite
public Object writeValue(long value) throws HiveException {
throw new HiveException("Internal error: should not reach here");
}
-
+
/**
* The base implementation must be overridden by the Long specialization
*/
@@ -112,7 +112,7 @@ public final class VectorExpressionWrite
public Object setValue(Object field, double value) throws HiveException {
throw new HiveException("Internal error: should not reach here");
}
-
+
/**
* The base implementation must be overridden by the Bytes specialization
*/
@@ -120,7 +120,7 @@ public final class VectorExpressionWrite
public Object writeValue(byte[] value, int start, int length) throws HiveException {
throw new HiveException("Internal error: should not reach here");
}
-
+
/**
* The base implementation must be overridden by the Bytes specialization
*/
@@ -171,7 +171,7 @@ public final class VectorExpressionWrite
"Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
row, lcv.noNulls, lcv.isRepeating, lcv.isNull[row], lcv.isNull[0]));
}
-
+
@Override
public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
LongColumnVector lcv = (LongColumnVector) column;
@@ -192,7 +192,7 @@ public final class VectorExpressionWrite
String.format(
"Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
row, lcv.noNulls, lcv.isRepeating, lcv.isNull[row], lcv.isNull[0]));
- }
+ }
}
/**
@@ -221,7 +221,7 @@ public final class VectorExpressionWrite
"Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
}
-
+
@Override
public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
DoubleColumnVector dcv = (DoubleColumnVector) column;
@@ -242,7 +242,7 @@ public final class VectorExpressionWrite
String.format(
"Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
- }
+ }
}
/**
@@ -292,7 +292,7 @@ public final class VectorExpressionWrite
String.format(
"Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
row, bcv.noNulls, bcv.isRepeating, bcv.isNull[row], bcv.isNull[0]));
- }
+ }
}
@@ -396,7 +396,7 @@ public final class VectorExpressionWrite
(SettableLongObjectInspector) fieldObjInspector);
case VOID:
return genVectorExpressionWritableVoid(
- (VoidObjectInspector) fieldObjInspector);
+ (VoidObjectInspector) fieldObjInspector);
case BINARY:
return genVectorExpressionWritableBinary(
(SettableBinaryObjectInspector) fieldObjInspector);
@@ -419,7 +419,7 @@ public final class VectorExpressionWrite
throw new IllegalArgumentException("Unknown primitive type: " +
((PrimitiveObjectInspector) fieldObjInspector).getPrimitiveCategory());
}
-
+
case STRUCT:
case UNION:
case MAP:
@@ -428,7 +428,7 @@ public final class VectorExpressionWrite
fieldObjInspector.getCategory());
default:
throw new IllegalArgumentException("Unknown type " +
- fieldObjInspector.getCategory());
+ fieldObjInspector.getCategory());
}
}
@@ -526,7 +526,7 @@ public final class VectorExpressionWrite
private Object obj;
private Timestamp ts;
- public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector)
+ public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
ts = new Timestamp(0);
@@ -550,7 +550,7 @@ public final class VectorExpressionWrite
((SettableTimestampObjectInspector) this.objectInspector).set(field, ts);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp(0));
@@ -563,15 +563,15 @@ public final class VectorExpressionWrite
return new VectorExpressionWriterBytes() {
private Object obj;
private Text text;
-
- public VectorExpressionWriter init(SettableHiveVarcharObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableHiveVarcharObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.text = new Text();
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(byte[] value, int start, int length) throws HiveException {
text.set(value, start, length);
@@ -580,7 +580,7 @@ public final class VectorExpressionWrite
}
@Override
- public Object setValue(Object field, byte[] value, int start, int length)
+ public Object setValue(Object field, byte[] value, int start, int length)
throws HiveException {
if (null == field) {
field = initValue(null);
@@ -589,7 +589,7 @@ public final class VectorExpressionWrite
((SettableHiveVarcharObjectInspector) this.objectInspector).set(field, text.toString());
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableHiveVarcharObjectInspector) this.objectInspector)
@@ -603,24 +603,24 @@ public final class VectorExpressionWrite
return new VectorExpressionWriterBytes() {
private Object obj;
private Text text;
-
- public VectorExpressionWriter init(SettableStringObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableStringObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.text = new Text();
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(byte[] value, int start, int length) throws HiveException {
this.text.set(value, start, length);
((SettableStringObjectInspector) this.objectInspector).set(this.obj, this.text.toString());
return this.obj;
}
-
+
@Override
- public Object setValue(Object field, byte[] value, int start, int length)
+ public Object setValue(Object field, byte[] value, int start, int length)
throws HiveException {
if (null == field) {
field = initValue(null);
@@ -628,12 +628,12 @@ public final class VectorExpressionWrite
this.text.set(value, start, length);
((SettableStringObjectInspector) this.objectInspector).set(field, this.text.toString());
return field;
- }
-
+ }
+
@Override
public Object initValue(Object ignored) {
return ((SettableStringObjectInspector) this.objectInspector).create(StringUtils.EMPTY);
- }
+ }
}.init(fieldObjInspector);
}
@@ -642,22 +642,22 @@ public final class VectorExpressionWrite
return new VectorExpressionWriterBytes() {
private Object obj;
private byte[] bytes;
-
- public VectorExpressionWriter init(SettableBinaryObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableBinaryObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.bytes = ArrayUtils.EMPTY_BYTE_ARRAY;
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(byte[] value, int start, int length) throws HiveException {
bytes = Arrays.copyOfRange(value, start, start + length);
((SettableBinaryObjectInspector) this.objectInspector).set(this.obj, bytes);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, byte[] value, int start, int length) throws HiveException {
if (null == field) {
@@ -666,7 +666,7 @@ public final class VectorExpressionWrite
bytes = Arrays.copyOfRange(value, start, start + length);
((SettableBinaryObjectInspector) this.objectInspector).set(field, bytes);
return field;
- }
+ }
@Override
public Object initValue(Object ignored) {
@@ -680,20 +680,20 @@ public final class VectorExpressionWrite
SettableLongObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterLong() {
private Object obj;
-
- public VectorExpressionWriter init(SettableLongObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableLongObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(long value) throws HiveException {
((SettableLongObjectInspector) this.objectInspector).set(this.obj, value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, long value) throws HiveException {
if (null == field) {
@@ -712,56 +712,55 @@ public final class VectorExpressionWrite
}
private static VectorExpressionWriter genVectorExpressionWritableVoid(
- VoidObjectInspector fieldObjInspector) throws HiveException {
- return new VectorExpressionWriterLong() {
- private Object obj;
-
- public VectorExpressionWriter init(VoidObjectInspector objInspector)
- throws HiveException {
- super.init(objInspector);
- this.obj = initValue(null);
- return this;
- }
-
- @Override
- public Object writeValue(long value) throws HiveException {
- return this.obj;
- }
-
- @Override
- public Object setValue(Object field, long value) throws HiveException {
- if (null == field) {
- field = initValue(null);
- }
- return field;
- }
-
- @Override
- public Object initValue(Object ignored) {
- return ((VoidObjectInspector) this.objectInspector).copyObject(null);
- }
- }.init(fieldObjInspector);
- }
-
-
+ VoidObjectInspector fieldObjInspector) throws HiveException {
+ return new VectorExpressionWriterLong() {
+ private Object obj;
+
+ public VectorExpressionWriter init(VoidObjectInspector objInspector) throws HiveException {
+ super.init(objInspector);
+ this.obj = initValue(null);
+ return this;
+ }
+
+ @Override
+ public Object writeValue(long value) throws HiveException {
+ return this.obj;
+ }
+
+ @Override
+ public Object setValue(Object field, long value) throws HiveException {
+ if (null == field) {
+ field = initValue(null);
+ }
+ return field;
+ }
+
+ @Override
+ public Object initValue(Object ignored) {
+ return ((VoidObjectInspector) this.objectInspector).copyObject(null);
+ }
+ }.init(fieldObjInspector);
+ }
+
+
private static VectorExpressionWriter genVectorExpressionWritableInt(
SettableIntObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterLong() {
private Object obj;
-
- public VectorExpressionWriter init(SettableIntObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableIntObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(long value) throws HiveException {
((SettableIntObjectInspector) this.objectInspector).set(this.obj, (int) value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, long value) throws HiveException {
if (null == field) {
@@ -770,7 +769,7 @@ public final class VectorExpressionWrite
((SettableIntObjectInspector) this.objectInspector).set(field, (int) value);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableIntObjectInspector) this.objectInspector)
@@ -783,20 +782,20 @@ public final class VectorExpressionWrite
SettableShortObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterLong() {
private Object obj;
-
- public VectorExpressionWriter init(SettableShortObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableShortObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(long value) throws HiveException {
((SettableShortObjectInspector) this.objectInspector).set(this.obj, (short) value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, long value) throws HiveException {
if (null == field) {
@@ -805,7 +804,7 @@ public final class VectorExpressionWrite
((SettableShortObjectInspector) this.objectInspector).set(field, (short) value);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableShortObjectInspector) this.objectInspector)
@@ -818,20 +817,20 @@ public final class VectorExpressionWrite
SettableByteObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterLong() {
private Object obj;
-
- public VectorExpressionWriter init(SettableByteObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableByteObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(long value) throws HiveException {
((SettableByteObjectInspector) this.objectInspector).set(this.obj, (byte) value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, long value) throws HiveException {
if (null == field) {
@@ -840,7 +839,7 @@ public final class VectorExpressionWrite
((SettableByteObjectInspector) this.objectInspector).set(field, (byte) value);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableByteObjectInspector) this.objectInspector)
@@ -853,31 +852,31 @@ public final class VectorExpressionWrite
SettableBooleanObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterLong() {
private Object obj;
-
- public VectorExpressionWriter init(SettableBooleanObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableBooleanObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(long value) throws HiveException {
- ((SettableBooleanObjectInspector) this.objectInspector).set(this.obj,
+ ((SettableBooleanObjectInspector) this.objectInspector).set(this.obj,
value == 0 ? false : true);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, long value) throws HiveException {
if (null == field) {
field = initValue(null);
}
- ((SettableBooleanObjectInspector) this.objectInspector).set(field,
+ ((SettableBooleanObjectInspector) this.objectInspector).set(field,
value == 0 ? false : true);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableBooleanObjectInspector) this.objectInspector)
@@ -890,20 +889,20 @@ public final class VectorExpressionWrite
SettableDoubleObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterDouble() {
private Object obj;
-
- public VectorExpressionWriter init(SettableDoubleObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableDoubleObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(double value) throws HiveException {
((SettableDoubleObjectInspector) this.objectInspector).set(this.obj, value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, double value) throws HiveException {
if (null == field) {
@@ -911,8 +910,8 @@ public final class VectorExpressionWrite
}
((SettableDoubleObjectInspector) this.objectInspector).set(field, value);
return field;
- }
-
+ }
+
@Override
public Object initValue(Object ignored) {
return ((SettableDoubleObjectInspector) this.objectInspector)
@@ -925,20 +924,20 @@ public final class VectorExpressionWrite
SettableFloatObjectInspector fieldObjInspector) throws HiveException {
return new VectorExpressionWriterDouble() {
private Object obj;
-
- public VectorExpressionWriter init(SettableFloatObjectInspector objInspector)
+
+ public VectorExpressionWriter init(SettableFloatObjectInspector objInspector)
throws HiveException {
super.init(objInspector);
this.obj = initValue(null);
return this;
}
-
+
@Override
public Object writeValue(double value) throws HiveException {
((SettableFloatObjectInspector) this.objectInspector).set(this.obj, (float) value);
return this.obj;
}
-
+
@Override
public Object setValue(Object field, double value) throws HiveException {
if (null == field) {
@@ -947,7 +946,7 @@ public final class VectorExpressionWrite
((SettableFloatObjectInspector) this.objectInspector).set(field, (float) value);
return field;
}
-
+
@Override
public Object initValue(Object ignored) {
return ((SettableFloatObjectInspector) this.objectInspector)
@@ -1027,25 +1026,25 @@ public final class VectorExpressionWrite
*/
public static VectorExpressionWriter[] getExpressionWriters(StructObjectInspector objInspector)
throws HiveException {
-
+
if (objInspector.isSettable()) {
return getSettableExpressionWriters((SettableStructObjectInspector) objInspector);
}
-
+
List<? extends StructField> allFieldRefs = objInspector.getAllStructFieldRefs();
-
+
VectorExpressionWriter[] expressionWriters = new VectorExpressionWriter[allFieldRefs.size()];
-
+
for(int i=0; i<expressionWriters.length; ++i) {
expressionWriters[i] = genVectorExpressionWritable(allFieldRefs.get(i).getFieldObjectInspector());
}
-
+
return expressionWriters;
}
public static VectorExpressionWriter[] getSettableExpressionWriters(
SettableStructObjectInspector objInspector) throws HiveException {
- List<? extends StructField> fieldsRef = objInspector.getAllStructFieldRefs();
+ List<? extends StructField> fieldsRef = objInspector.getAllStructFieldRefs();
VectorExpressionWriter[] writers = new VectorExpressionWriter[fieldsRef.size()];
for(int i=0; i<writers.length; ++i) {
StructField fieldRef = fieldsRef.get(i);
@@ -1054,19 +1053,19 @@ public final class VectorExpressionWrite
writers[i] = genVectorExpressionWritable(objInspector, fieldRef, baseWriter);
}
return writers;
-
+
}
-
+
/**
- * VectorExpressionWriterSetter helper for vector expression writers that use
+ * VectorExpressionWriterSetter helper for vector expression writers that use
* settable ObjectInspector fields to assign the values.
- * This is used by the OrcStruct serialization (eg. CREATE TABLE ... AS ...)
+ * This is used by the OrcStruct serialization (eg. CREATE TABLE ... AS ...)
*/
private static class VectorExpressionWriterSetter extends VectorExpressionWriterBase {
private SettableStructObjectInspector settableObjInspector;
private StructField fieldRef;
private VectorExpressionWriter baseWriter;
-
+
public VectorExpressionWriterSetter init(
SettableStructObjectInspector objInspector,
StructField fieldRef,
@@ -1087,15 +1086,15 @@ public final class VectorExpressionWrite
@Override
public Object setValue(Object row, ColumnVector column, int columnRow)
throws HiveException {
-
+
// NULLs are handled by each individual base writer setter
// We could handle NULLs centrally here but that would result in spurious allocs
-
+
Object fieldValue = this.settableObjInspector.getStructFieldData(row, fieldRef);
fieldValue = baseWriter.setValue(fieldValue, column, columnRow);
return this.settableObjInspector.setStructFieldData(row, fieldRef, fieldValue);
}
-
+
@Override
public Object initValue(Object struct) throws HiveException {
Object initValue = this.baseWriter.initValue(null);
@@ -1103,7 +1102,7 @@ public final class VectorExpressionWrite
return struct;
}
}
-
+
private static VectorExpressionWriter genVectorExpressionWritable(
SettableStructObjectInspector objInspector,
StructField fieldRef,
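The hunks above are whitespace-only, but they trace the pattern the rest of this file depends on: each anonymous writer is init'd once with a settable ObjectInspector, writeValue overwrites a single preallocated object (so its result is only valid until the next call), and setValue fills a caller-owned object, falling back to initValue when handed null. A minimal self-contained sketch of that contract, with illustrative names (Writer, WriterPatternSketch) rather than the Hive classes:

public final class WriterPatternSketch {

  interface Writer {
    Object writeValue(long value);             // reuses an internal holder
    Object setValue(Object field, long value); // fills (or creates) the caller's holder
  }

  static Writer makeLongWriter() {
    return new Writer() {
      // single preallocated holder, overwritten on every writeValue call
      private final long[] obj = new long[1];

      @Override
      public Object writeValue(long value) {
        obj[0] = value;
        return obj;
      }

      @Override
      public Object setValue(Object field, long value) {
        long[] holder = (field == null) ? new long[1] : (long[]) field;
        holder[0] = value;
        return holder;
      }
    };
  }

  public static void main(String[] args) {
    Writer w = makeLongWriter();
    Object a = w.writeValue(1);
    Object b = w.writeValue(2);
    System.out.println(a == b);         // true: writeValue reuses one object
    Object c = w.setValue(null, 3);
    System.out.println(a == c);         // false: setValue gave the caller its own
  }
}

The reuse inside writeValue is the design point: it avoids a per-row allocation, at the cost that callers must copy the result if they keep it across rows.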
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java Tue Sep 2 16:30:33 2014
@@ -78,7 +78,7 @@ public class HiveIgnoreKeyTextOutputForm
final int finalRowSeparator = rowSeparator;
FileSystem fs = outPath.getFileSystem(jc);
final OutputStream outStream = Utilities.createCompressedStream(jc,
- fs.create(outPath, progress), isCompressed);
+ fs.create(outPath, progress), isCompressed);
return new RecordWriter() {
@Override
public void write(Writable r) throws IOException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveNullValueSequenceFileOutputFormat.java Tue Sep 2 16:30:33 2014
@@ -54,7 +54,7 @@ public class HiveNullValueSequenceFileOu
FileSystem fs = finalOutPath.getFileSystem(jc);
final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc, fs, finalOutPath,
- HiveKey.class, NullWritable.class, isCompressed, progress);
+ HiveKey.class, NullWritable.class, isCompressed, progress);
keyWritable = new HiveKey();
keyIsText = valueClass.equals(Text.class);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java Tue Sep 2 16:30:33 2014
@@ -62,7 +62,7 @@ public class HiveSequenceFileOutputForma
FileSystem fs = finalOutPath.getFileSystem(jc);
final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc, fs, finalOutPath,
- BytesWritable.class, valueClass, isCompressed, progress);
+ BytesWritable.class, valueClass, isCompressed, progress);
return new RecordWriter() {
@Override
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileOutputFormat.java Tue Sep 2 16:30:33 2014
@@ -133,7 +133,7 @@ public class RCFileOutputFormat extends
RCFileOutputFormat.setColumnNumber(jc, cols.length);
final RCFile.Writer outWriter = Utilities.createRCFileWriter(jc,
- finalOutPath.getFileSystem(jc), finalOutPath, isCompressed, progress);
+ finalOutPath.getFileSystem(jc), finalOutPath, isCompressed, progress);
return new org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter() {
@Override
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java Tue Sep 2 16:30:33 2014
@@ -64,7 +64,7 @@ public class RCFileRecordReader<K extend
private final Map<String, RCFileSyncEntry> cache;
public RCFileSyncCache() {
- cache = Collections.synchronizedMap(new WeakHashMap<String, RCFileSyncEntry>());
+ cache = Collections.synchronizedMap(new WeakHashMap<String, RCFileSyncEntry>());
}
public void put(FileSplit split, long endSync) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java Tue Sep 2 16:30:33 2014
@@ -910,31 +910,31 @@ public class OrcInputFormat implements
static List<OrcSplit> generateSplitsInfo(Configuration conf)
throws IOException {
- // use threads to resolve directories into splits
- Context context = new Context(conf);
- for(Path dir: getInputPaths(conf)) {
- FileSystem fs = dir.getFileSystem(conf);
- context.schedule(new FileGenerator(context, fs, dir));
- }
- context.waitForTasks();
- // deal with exceptions
- if (!context.errors.isEmpty()) {
- List<IOException> errors =
- new ArrayList<IOException>(context.errors.size());
- for(Throwable th: context.errors) {
- if (th instanceof IOException) {
- errors.add((IOException) th);
- } else {
- throw new RuntimeException("serious problem", th);
- }
- }
- throw new InvalidInputException(errors);
- }
+ // use threads to resolve directories into splits
+ Context context = new Context(conf);
+ for(Path dir: getInputPaths(conf)) {
+ FileSystem fs = dir.getFileSystem(conf);
+ context.schedule(new FileGenerator(context, fs, dir));
+ }
+ context.waitForTasks();
+ // deal with exceptions
+ if (!context.errors.isEmpty()) {
+ List<IOException> errors =
+ new ArrayList<IOException>(context.errors.size());
+ for(Throwable th: context.errors) {
+ if (th instanceof IOException) {
+ errors.add((IOException) th);
+ } else {
+ throw new RuntimeException("serious problem", th);
+ }
+ }
+ throw new InvalidInputException(errors);
+ }
if (context.cacheStripeDetails) {
LOG.info("FooterCacheHitRatio: " + context.cacheHitCounter.get() + "/"
+ context.numFilesCounter.get());
}
- return context.splits;
+ return context.splits;
}
@Override
@@ -998,14 +998,14 @@ public class OrcInputFormat implements
((FileSplit) inputSplit).getPath(),
OrcFile.readerOptions(conf)), conf, (FileSplit) inputSplit);
}
-
+
OrcSplit split = (OrcSplit) inputSplit;
reporter.setStatus(inputSplit.toString());
Options options = new Options(conf).reporter(reporter);
final RowReader<OrcStruct> inner = getReader(inputSplit, options);
-
-
+
+
/*Even though there are no delta files, we still need to produce row ids so that an
* UPDATE or DELETE statement would work on a table which didn't have any previous updates*/
if (split.isOriginal() && split.getDeltas().isEmpty()) {
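The re-indented body of generateSplitsInfo above is a scatter/gather loop: schedule one task per input directory, wait, then partition the collected failures into IOExceptions (reported together, in Hive's case as an InvalidInputException) and everything else (escalated as a "serious problem"). A rough standalone sketch of that error-collection shape, assuming a plain ExecutorService in place of Hive's Context and a placeholder for the real directory listing:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class SplitScanSketch {

  public static List<String> generateSplits(List<String> dirs)
      throws IOException, InterruptedException {
    final List<String> splits = new CopyOnWriteArrayList<String>();
    final List<Throwable> errors = new CopyOnWriteArrayList<Throwable>();

    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (final String dir : dirs) {
      pool.execute(new Runnable() {
        @Override
        public void run() {
          try {
            splits.add("split-of-" + dir);  // placeholder for the real file listing
          } catch (Throwable th) {
            errors.add(th);                 // gather, don't fail fast
          }
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);

    if (!errors.isEmpty()) {
      List<IOException> ioErrors = new ArrayList<IOException>(errors.size());
      for (Throwable th : errors) {
        if (th instanceof IOException) {
          ioErrors.add((IOException) th);   // expected: report these together
        } else {
          throw new RuntimeException("serious problem", th);  // unexpected: escalate
        }
      }
      // Hive wraps the list in an InvalidInputException; a plain IOException here
      throw new IOException("split generation failed in " + ioErrors.size() + " directories");
    }
    return splits;
  }
}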
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java Tue Sep 2 16:30:33 2014
@@ -95,7 +95,7 @@ public class HiveSchemaConverter {
int scale = decimalTypeInfo.scale();
int bytes = ParquetHiveSerDe.PRECISION_TO_BYTE_COUNT[prec - 1];
return Types.optional(PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY).length(bytes).as(OriginalType.DECIMAL).
- scale(scale).precision(prec).named(name);
+ scale(scale).precision(prec).named(name);
} else if (typeInfo.equals(TypeInfoFactory.unknownTypeInfo)) {
throw new UnsupportedOperationException("Unknown type not implemented");
} else {
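In the decimal branch above, PRECISION_TO_BYTE_COUNT[prec - 1] supplies the FIXED_LEN_BYTE_ARRAY width for each precision. One plausible derivation of such a table (an assumption about how the constant could be built, not a quote of Hive's code) is the smallest signed two's-complement width whose range covers the largest unscaled value, 10^precision - 1:

import java.math.BigInteger;

public class DecimalWidthSketch {

  // Smallest signed two's-complement byte width covering +/-(10^precision - 1).
  static int bytesForPrecision(int precision) {
    BigInteger maxUnscaled = BigInteger.TEN.pow(precision).subtract(BigInteger.ONE);
    // bitLength() counts magnitude bits only; the extra bit is the sign.
    return maxUnscaled.bitLength() / 8 + 1;
  }

  public static void main(String[] args) {
    // e.g. precision 10 -> 5 bytes: 9999999999 needs 34 magnitude bits plus a sign bit
    for (int p = 1; p <= 38; p++) {
      System.out.println("precision " + p + " -> " + bytesForPrecision(p) + " bytes");
    }
  }
}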
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java Tue Sep 2 16:30:33 2014
@@ -140,7 +140,7 @@ public class DataWritableReadSupport ext
throw new IllegalStateException(msg);
}
}
- }
+ }
}
requestedSchemaByUser = resolveSchemaAccess(new MessageType(fileSchema.getName(),
typeListWanted), fileSchema, configuration);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Tue Sep 2 16:30:33 2014
@@ -369,7 +369,7 @@ public final class ColumnPrunerProcFacto
return null;
}
cols = cols == null ? new ArrayList<String>() : cols;
-
+
cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
cols);
RowResolver inputRR = cppCtx.getOpToParseCtxMap().get(scanOp).getRowResolver();
@@ -479,13 +479,13 @@ public final class ColumnPrunerProcFacto
flags[index] = true;
colLists = Utilities.mergeUniqElems(colLists, valCols.get(index).getCols());
}
-
+
Collections.sort(colLists);
pruneReduceSinkOperator(flags, op, cppCtx);
cppCtx.getPrunedColLists().put(op, colLists);
return null;
}
-
+
// Reduce Sink contains the columns needed - no need to aggregate from
// children
for (ExprNodeDesc val : valCols) {
@@ -519,7 +519,7 @@ public final class ColumnPrunerProcFacto
if (cols == null) {
return null;
}
-
+
Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
// As columns go down the DAG, the LVJ will transform internal column
// names from something like 'key' to '_col0'. Because of this, we need
@@ -604,8 +604,8 @@ public final class ColumnPrunerProcFacto
Object... nodeOutputs) throws SemanticException {
SelectOperator op = (SelectOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
-
-
+
+
if (op.getChildOperators() != null) {
for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
// UDTF is not handled yet, so the parent SelectOp of UDTF should just assume
@@ -858,11 +858,11 @@ public final class ColumnPrunerProcFacto
if (inputSchema != null) {
ArrayList<ColumnInfo> rs = new ArrayList<ColumnInfo>();
ArrayList<ColumnInfo> inputCols = inputSchema.getSignature();
- for (ColumnInfo i: inputCols) {
+ for (ColumnInfo i: inputCols) {
if (cols.contains(i.getInternalName())) {
rs.add(i);
}
- }
+ }
op.getSchema().setSignature(rs);
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java Tue Sep 2 16:30:33 2014
@@ -4,9 +4,9 @@
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
@@ -100,7 +100,7 @@ public final class ConstantPropagateProc
/**
* Get ColumnInfo from column expression.
- *
+ *
* @param rr
* @param desc
* @return
@@ -139,7 +139,7 @@ public final class ConstantPropagateProc
/**
* Cast type from expression type to expected type ti.
- *
+ *
* @param desc constant expression
* @param ti expected type info
* @return cast constant, or null if the type cast failed.
@@ -189,10 +189,10 @@ public final class ConstantPropagateProc
/**
* Fold input expression desc.
- *
+ *
* If desc is a UDF and all parameters are constants, evaluate it. If desc is a column expression,
* find it from propagated constants, and if there is, replace it with constant.
- *
+ *
* @param desc folding expression
* @param constants current propagated constant map
* @param cppCtx
@@ -296,7 +296,7 @@ public final class ConstantPropagateProc
/**
* Propagate assignment expression, adding an entry into constant map constants.
- *
+ *
* @param udf expression UDF, currently only 2 UDFs are supported: '=' and 'is null'.
* @param newExprs child expressions (parameters).
* @param cppCtx
@@ -350,7 +350,7 @@ public final class ConstantPropagateProc
ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
if (Boolean.TRUE.equals(c.getValue())) {
- // if true, prune it
+ // if true, prune it
return newExprs.get(Math.abs(i - 1));
} else {
@@ -384,7 +384,7 @@ public final class ConstantPropagateProc
/**
* Evaluate column, replace the deterministic columns with constants if possible
- *
+ *
* @param desc
* @param ctx
* @param op
@@ -435,7 +435,7 @@ public final class ConstantPropagateProc
/**
* Evaluate UDF
- *
+ *
* @param udf UDF object
* @param exprs
* @param oldExprs
@@ -512,7 +512,7 @@ public final class ConstantPropagateProc
/**
* Change operator row schema, replace column with constant if it is.
- *
+ *
* @param op
* @param constants
* @throws SemanticException
@@ -584,7 +584,7 @@ public final class ConstantPropagateProc
/**
* Factory method to get the ConstantPropagateFilterProc class.
- *
+ *
* @return ConstantPropagateFilterProc
*/
public static ConstantPropagateFilterProc getFilterProc() {
@@ -621,7 +621,7 @@ public final class ConstantPropagateProc
/**
* Factory method to get the ConstantPropagateGroupByProc class.
- *
+ *
* @return ConstantPropagateGroupByProc
*/
public static ConstantPropagateGroupByProc getGroupByProc() {
@@ -650,7 +650,7 @@ public final class ConstantPropagateProc
/**
* Factory method to get the ConstantPropagateDefaultProc class.
- *
+ *
* @return ConstantPropagateDefaultProc
*/
public static ConstantPropagateDefaultProc getDefaultProc() {
@@ -683,7 +683,7 @@ public final class ConstantPropagateProc
/**
* The Factory method to get the ConstantPropagateSelectProc class.
- *
+ *
* @return ConstantPropagateSelectProc
*/
public static ConstantPropagateSelectProc getSelectProc() {
@@ -877,7 +877,7 @@ public final class ConstantPropagateProc
return null;
}
- // Note: the following code (removing folded constants in exprs) is deeply coupled with
+ // Note: the following code (removing folded constants in exprs) is deeply coupled with
// ColumnPruner optimizer.
// Assuming ColumnPrunner will remove constant columns so we don't deal with output columns.
// Except one case that the join operator is followed by a redistribution (RS operator).
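The Javadoc hunks above name the two folding moves: evaluate a UDF once all of its parameters are constants, and substitute a column reference when the propagated-constant map holds a value for it. The same bottom-up idea in miniature, over a toy expression tree (Const/Col/Add are illustrative stand-ins for Hive's ExprNodeDesc hierarchy):

import java.util.Collections;
import java.util.Map;

public class ConstantFoldSketch {

  abstract static class Expr {}

  static class Const extends Expr {
    final long v;
    Const(long v) { this.v = v; }
  }

  static class Col extends Expr {
    final String name;
    Col(String name) { this.name = name; }
  }

  static class Add extends Expr {
    final Expr l, r;
    Add(Expr l, Expr r) { this.l = l; this.r = r; }
  }

  static Expr fold(Expr e, Map<String, Long> constants) {
    if (e instanceof Col) {
      Long c = constants.get(((Col) e).name);
      return c != null ? new Const(c) : e;               // substitute a propagated constant
    }
    if (e instanceof Add) {
      Expr l = fold(((Add) e).l, constants);
      Expr r = fold(((Add) e).r, constants);
      if (l instanceof Const && r instanceof Const) {
        return new Const(((Const) l).v + ((Const) r).v); // all-constant op: evaluate now
      }
      return new Add(l, r);                              // partially folded
    }
    return e;                                            // already a constant
  }

  public static void main(String[] args) {
    // e.g. WHERE key = 1 propagates key -> 1, so key + 2 folds to the literal 3
    Expr folded = fold(new Add(new Col("key"), new Const(2)),
        Collections.singletonMap("key", 1L));
    System.out.println(((Const) folded).v);  // 3
  }
}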
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java Tue Sep 2 16:30:33 2014
@@ -353,14 +353,14 @@ public class OpProcFactory {
if (inpOp.getSchema() != null && inpOp.getSchema().getSignature() != null ) {
for(ColumnInfo ci : inpOp.getSchema().getSignature()) {
Dependency inp_dep = lctx.getIndex().getDependency(inpOp, ci);
- // The dependency can be null as some of the input cis may not have
- // been set in case of joins.
- if (inp_dep != null) {
- for(BaseColumnInfo bci : inp_dep.getBaseCols()) {
- new_type = LineageCtx.getNewDependencyType(inp_dep.getType(), new_type);
- tai_set.add(bci.getTabAlias());
- }
- }
+ // The dependency can be null as some of the input cis may not have
+ // been set in case of joins.
+ if (inp_dep != null) {
+ for(BaseColumnInfo bci : inp_dep.getBaseCols()) {
+ new_type = LineageCtx.getNewDependencyType(inp_dep.getType(), new_type);
+ tai_set.add(bci.getTabAlias());
+ }
+ }
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Tue Sep 2 16:30:33 2014
@@ -66,26 +66,26 @@ public class QB {
private HashMap<String, WindowingSpec> destToWindowingSpec;
/*
- * If this QB represents a SubQuery predicate then this will point to the SubQuery object.
+ * If this QB represents a SubQuery predicate then this will point to the SubQuery object.
*/
private QBSubQuery subQueryPredicateDef;
-
- /*
- * used to give a unique name to each SubQuery QB Currently there can be at
- * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
- * clause.
- */
- private int numSubQueryPredicates;
-
- /*
- * for now a top level QB can have 1 where clause SQ predicate.
- */
- private QBSubQuery whereClauseSubQueryPredicate;
-
+
+ /*
+ * used to give a unique name to each SubQuery QB Currently there can be at
+ * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
+ * clause.
+ */
+ private int numSubQueryPredicates;
+
/*
* for now a top level QB can have 1 where clause SQ predicate.
*/
- private QBSubQuery havingClauseSubQueryPredicate;
+ private QBSubQuery whereClauseSubQueryPredicate;
+
+ /*
+ * for now a top level QB can have 1 having clause SQ predicate.
+ */
+ private QBSubQuery havingClauseSubQueryPredicate;
// results
@@ -341,28 +341,28 @@ public class QB {
protected QBSubQuery getSubQueryPredicateDef() {
return subQueryPredicateDef;
}
-
- protected int getNumSubQueryPredicates() {
- return numSubQueryPredicates;
- }
-
- protected int incrNumSubQueryPredicates() {
- return ++numSubQueryPredicates;
- }
-
- void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
- whereClauseSubQueryPredicate = sq;
- }
-
- public QBSubQuery getWhereClauseSubQueryPredicate() {
- return whereClauseSubQueryPredicate;
- }
-
- void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
+
+ protected int getNumSubQueryPredicates() {
+ return numSubQueryPredicates;
+ }
+
+ protected int incrNumSubQueryPredicates() {
+ return ++numSubQueryPredicates;
+ }
+
+ void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
+ whereClauseSubQueryPredicate = sq;
+ }
+
+ public QBSubQuery getWhereClauseSubQueryPredicate() {
+ return whereClauseSubQueryPredicate;
+ }
+
+ void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
havingClauseSubQueryPredicate = sq;
}
-
- public QBSubQuery getHavingClauseSubQueryPredicate() {
+
+ public QBSubQuery getHavingClauseSubQueryPredicate() {
return havingClauseSubQueryPredicate;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java Tue Sep 2 16:30:33 2014
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.ql.parse.SubQueryDiagnostic.QBSubQueryRewrite;
public class QBSubQuery implements ISubQueryJoinInfo {
-
+
public static enum SubQueryType {
EXISTS,
NOT_EXISTS,
@@ -149,16 +149,16 @@ public class QBSubQuery implements ISubQ
}
/*
- * This class captures the information about a
+ * This class captures the information about a
* conjunct in the where clause of the SubQuery.
* For a equality predicate it capture for each side:
* - the AST
* - the type of Expression (basically what columns are referenced)
- * - for Expressions that refer the parent it captures the
+ * - for Expressions that refer the parent it captures the
* parent's ColumnInfo. In case of outer Aggregation expressions
* we need this to introduce a new mapping in the OuterQuery
* RowResolver. A join condition must use qualified column references,
- * so we generate a new name for the aggr expression and use it in the
+ * so we generate a new name for the aggr expression and use it in the
* joining condition.
* For e.g.
* having exists ( select x from R2 where y = min(R1.z) )
@@ -174,8 +174,8 @@ public class QBSubQuery implements ISubQ
private final ColumnInfo leftOuterColInfo;
private final ColumnInfo rightOuterColInfo;
- Conjunct(ASTNode leftExpr,
- ASTNode rightExpr,
+ Conjunct(ASTNode leftExpr,
+ ASTNode rightExpr,
ExprType leftExprType,
ExprType rightExprType,
ColumnInfo leftOuterColInfo,
@@ -239,8 +239,8 @@ public class QBSubQuery implements ISubQ
Stack<Node> stack;
ConjunctAnalyzer(RowResolver parentQueryRR,
- boolean forHavingClause,
- String parentQueryNewAlias) {
+ boolean forHavingClause,
+ String parentQueryNewAlias) {
this.parentQueryRR = parentQueryRR;
defaultExprProcessor = new DefaultExprProcessor();
this.forHavingClause = forHavingClause;
@@ -260,13 +260,13 @@ public class QBSubQuery implements ISubQ
private ObjectPair<ExprType,ColumnInfo> analyzeExpr(ASTNode expr) {
ColumnInfo cInfo = null;
if ( forHavingClause ) {
- try {
- cInfo = parentQueryRR.getExpression(expr);
- if ( cInfo != null) {
- return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
- }
- } catch(SemanticException se) {
- }
+ try {
+ cInfo = parentQueryRR.getExpression(expr);
+ if ( cInfo != null) {
+ return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
+ }
+ } catch(SemanticException se) {
+ }
}
if ( expr.getType() == HiveParser.DOT) {
ASTNode dot = firstDot(expr);
@@ -308,12 +308,12 @@ public class QBSubQuery implements ISubQ
ObjectPair<ExprType,ColumnInfo> leftInfo = analyzeExpr(left);
ObjectPair<ExprType,ColumnInfo> rightInfo = analyzeExpr(right);
- return new Conjunct(left, right,
+ return new Conjunct(left, right,
leftInfo.getFirst(), rightInfo.getFirst(),
leftInfo.getSecond(), rightInfo.getSecond());
} else {
ObjectPair<ExprType,ColumnInfo> sqExprInfo = analyzeExpr(conjunct);
- return new Conjunct(conjunct, null,
+ return new Conjunct(conjunct, null,
sqExprInfo.getFirst(), null,
sqExprInfo.getSecond(), sqExprInfo.getSecond());
}
@@ -354,86 +354,86 @@ public class QBSubQuery implements ISubQ
}
/*
- * When transforming a Not In SubQuery we need to check for nulls in the
+ * When transforming a Not In SubQuery we need to check for nulls in the
* Joining expressions of the SubQuery. If there are nulls then the SubQuery always
- * return false. For more details see
+ * return false. For more details see
* https://issues.apache.org/jira/secure/attachment/12614003/SubQuerySpec.pdf
- *
+ *
* Basically, SQL semantics say that:
* - R1.A not in (null, 1, 2, ...)
- * is always false.
- * A 'not in' operator is equivalent to a '<> all'. Since a not equal check with null
+ * is always false.
+ * A 'not in' operator is equivalent to a '<> all'. Since a not equal check with null
* returns false, a not in predicate against a set with a 'null' value always returns false.
- *
+ *
* So for not in SubQuery predicates:
* - we join in a null count predicate.
* - And the joining condition is that the 'Null Count' query has a count of 0.
- *
+ *
*/
class NotInCheck implements ISubQueryJoinInfo {
-
+
private static final String CNT_ALIAS = "c1";
-
+
/*
* expressions in SubQ that are joined to the Outer Query.
*/
List<ASTNode> subQryCorrExprs;
-
+
/*
* row resolver of the SubQuery.
* Set by the SemanticAnalyzer after the Plan for the SubQuery is genned.
* This is needed in case the SubQuery select list contains a TOK_ALLCOLREF
*/
RowResolver sqRR;
-
+
NotInCheck() {
subQryCorrExprs = new ArrayList<ASTNode>();
}
-
+
void addCorrExpr(ASTNode corrExpr) {
subQryCorrExprs.add(corrExpr);
}
-
+
public ASTNode getSubQueryAST() {
ASTNode ast = SubQueryUtils.buildNotInNullCheckQuery(
- QBSubQuery.this.getSubQueryAST(),
- QBSubQuery.this.getAlias(),
- CNT_ALIAS,
+ QBSubQuery.this.getSubQueryAST(),
+ QBSubQuery.this.getAlias(),
+ CNT_ALIAS,
subQryCorrExprs,
sqRR);
SubQueryUtils.setOriginDeep(ast, QBSubQuery.this.originalSQASTOrigin);
return ast;
}
-
+
public String getAlias() {
return QBSubQuery.this.getAlias() + "_notin_nullcheck";
}
-
+
public JoinType getJoinType() {
return JoinType.LEFTSEMI;
}
-
+
public ASTNode getJoinConditionAST() {
- ASTNode ast =
+ ASTNode ast =
SubQueryUtils.buildNotInNullJoinCond(getAlias(), CNT_ALIAS);
SubQueryUtils.setOriginDeep(ast, QBSubQuery.this.originalSQASTOrigin);
return ast;
}
-
+
public QBSubQuery getSubQuery() {
return QBSubQuery.this;
}
-
+
public String getOuterQueryId() {
return QBSubQuery.this.getOuterQueryId();
}
-
+
void setSQRR(RowResolver sqRR) {
this.sqRR = sqRR;
}
-
+
}
-
+
private final String outerQueryId;
private final int sqIdx;
private final String alias;
@@ -455,11 +455,11 @@ public class QBSubQuery implements ISubQ
private int numOfCorrelationExprsAddedToSQSelect;
private boolean groupbyAddedToSQ;
-
+
private int numOuterCorrExprsForHaving;
-
+
private NotInCheck notInCheck;
-
+
private QBSubQueryRewrite subQueryDiagnostic;
public QBSubQuery(String outerQueryId,
@@ -483,11 +483,11 @@ public class QBSubQuery implements ISubQ
originalSQASTOrigin = new ASTNodeOrigin("SubQuery", alias, s, alias, originalSQAST);
numOfCorrelationExprsAddedToSQSelect = 0;
groupbyAddedToSQ = false;
-
+
if ( operator.getType() == SubQueryType.NOT_IN ) {
notInCheck = new NotInCheck();
}
-
+
subQueryDiagnostic = SubQueryDiagnostic.getRewrite(this, ctx.getTokenRewriteStream(), ctx);
}
@@ -500,18 +500,18 @@ public class QBSubQuery implements ISubQ
public SubQueryTypeDef getOperator() {
return operator;
}
-
+
public ASTNode getOriginalSubQueryASTForRewrite() {
return (operator.getType() == SubQueryType.NOT_EXISTS
- || operator.getType() == SubQueryType.NOT_IN ?
- (ASTNode) originalSQASTOrigin.getUsageNode().getParent() :
+ || operator.getType() == SubQueryType.NOT_IN ?
+ (ASTNode) originalSQASTOrigin.getUsageNode().getParent() :
originalSQASTOrigin.getUsageNode());
}
void validateAndRewriteAST(RowResolver outerQueryRR,
- boolean forHavingClause,
- String outerQueryAlias,
- Set<String> outerQryAliases) throws SemanticException {
+ boolean forHavingClause,
+ String outerQueryAlias,
+ Set<String> outerQryAliases) throws SemanticException {
ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
@@ -519,12 +519,12 @@ public class QBSubQuery implements ISubQ
if ( selectClause.getChild(0).getType() == HiveParser.TOK_HINTLIST ) {
selectExprStart = 1;
}
-
+
/*
* Restriction.16.s :: Correlated Expression in Outer Query must not contain
* unqualified column references.
*/
- if ( parentQueryExpression != null && !forHavingClause ) {
+ if ( parentQueryExpression != null && !forHavingClause ) {
ASTNode u = SubQueryUtils.hasUnQualifiedColumnReferences(parentQueryExpression);
if ( u != null ) {
subQueryAST.setOrigin(originalSQASTOrigin);
@@ -532,7 +532,7 @@ public class QBSubQuery implements ISubQ
u, "Correlating expression cannot contain unqualified column references."));
}
}
-
+
/*
* Restriction 17.s :: SubQuery cannot use the same table alias as one used in
* the Outer Query.
@@ -546,14 +546,14 @@ public class QBSubQuery implements ISubQ
}
if ( sharedAlias != null) {
ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
-
+
if ( whereClause != null ) {
ASTNode u = SubQueryUtils.hasUnQualifiedColumnReferences(whereClause);
if ( u != null ) {
subQueryAST.setOrigin(originalSQASTOrigin);
throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
u, "SubQuery cannot use the table alias: " + sharedAlias + "; " +
- "this is also an alias in the Outer Query and SubQuery contains a unqualified column reference"));
+ "this is also an alias in the Outer Query and SubQuery contains a unqualified column reference"));
}
}
}
@@ -641,25 +641,25 @@ public class QBSubQuery implements ISubQ
}
void buildJoinCondition(RowResolver outerQueryRR, RowResolver sqRR,
- boolean forHavingClause,
- String outerQueryAlias) throws SemanticException {
+ boolean forHavingClause,
+ String outerQueryAlias) throws SemanticException {
ASTNode parentQueryJoinCond = null;
if ( parentQueryExpression != null ) {
-
+
ColumnInfo outerQueryCol = null;
try {
outerQueryCol = outerQueryRR.getExpression(parentQueryExpression);
} catch(SemanticException se) {
}
-
+
parentQueryJoinCond = SubQueryUtils.buildOuterQryToSQJoinCond(
getOuterQueryExpression(),
alias,
sqRR);
-
+
if ( outerQueryCol != null ) {
- rewriteCorrConjunctForHaving(parentQueryJoinCond, true,
+ rewriteCorrConjunctForHaving(parentQueryJoinCond, true,
outerQueryAlias, outerQueryRR, outerQueryCol);
}
subQueryDiagnostic.addJoinCondition(parentQueryJoinCond, outerQueryCol != null, true);
@@ -682,10 +682,10 @@ public class QBSubQuery implements ISubQ
ASTNode updateOuterQueryFilter(ASTNode outerQryFilter) {
if (postJoinConditionAST == null ) {
return outerQryFilter;
- }
-
+ }
+
subQueryDiagnostic.addPostJoinCondition(postJoinConditionAST);
-
+
if ( outerQryFilter == null ) {
return postJoinConditionAST;
}
@@ -738,7 +738,7 @@ public class QBSubQuery implements ISubQ
* Additional things for Having clause:
* - A correlation predicate may refer to an aggregation expression.
* - This introduces 2 twists to the rewrite:
- * a. When analyzing equality predicates we need to analyze each side
+ * a. When analyzing equality predicates we need to analyze each side
* to see if it is an aggregation expression from the Outer Query.
* So for e.g. this is a valid correlation predicate:
* R2.x = min(R1.y)
@@ -748,12 +748,12 @@ public class QBSubQuery implements ISubQ
* to contain a qualified column references.
* We handle this by generating a new name for the aggregation expression,
* like R1._gby_sq_col_1 and adding this mapping to the Outer Query's
- * Row Resolver. Then we construct a joining predicate using this new
+ * Row Resolver. Then we construct a joining predicate using this new
* name; so in our e.g. the condition would be: R2.x = R1._gby_sq_col_1
*/
private void rewrite(RowResolver parentQueryRR,
- boolean forHavingClause,
- String outerQueryAlias) throws SemanticException {
+ boolean forHavingClause,
+ String outerQueryAlias) throws SemanticException {
ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
@@ -766,7 +766,7 @@ public class QBSubQuery implements ISubQ
SubQueryUtils.extractConjuncts(searchCond, conjuncts);
ConjunctAnalyzer conjunctAnalyzer = new ConjunctAnalyzer(parentQueryRR,
- forHavingClause, outerQueryAlias);
+ forHavingClause, outerQueryAlias);
ASTNode sqNewSearchCond = null;
for(ASTNode conjunctAST : conjuncts) {
@@ -805,7 +805,7 @@ public class QBSubQuery implements ISubQ
corrCondLeftIsRewritten = true;
if ( forHavingClause && conjunct.getRightOuterColInfo() != null ) {
corrCondRightIsRewritten = true;
- rewriteCorrConjunctForHaving(conjunctAST, false, outerQueryAlias,
+ rewriteCorrConjunctForHaving(conjunctAST, false, outerQueryAlias,
parentQueryRR, conjunct.getRightOuterColInfo());
}
ASTNode joinPredciate = SubQueryUtils.alterCorrelatedPredicate(
@@ -829,7 +829,7 @@ public class QBSubQuery implements ISubQ
corrCondRightIsRewritten = true;
if ( forHavingClause && conjunct.getLeftOuterColInfo() != null ) {
corrCondLeftIsRewritten = true;
- rewriteCorrConjunctForHaving(conjunctAST, true, outerQueryAlias,
+ rewriteCorrConjunctForHaving(conjunctAST, true, outerQueryAlias,
parentQueryRR, conjunct.getLeftOuterColInfo());
}
ASTNode joinPredciate = SubQueryUtils.alterCorrelatedPredicate(
@@ -901,7 +901,7 @@ public class QBSubQuery implements ISubQ
for(ASTNode child : newChildren ) {
subQueryAST.addChild(child);
}
-
+
subQueryDiagnostic.setAddGroupByClause();
return groupBy;
@@ -927,26 +927,26 @@ public class QBSubQuery implements ISubQ
public int getNumOfCorrelationExprsAddedToSQSelect() {
return numOfCorrelationExprsAddedToSQSelect;
}
-
-
+
+
public QBSubQueryRewrite getDiagnostic() {
return subQueryDiagnostic;
}
-
+
public QBSubQuery getSubQuery() {
return this;
}
-
+
NotInCheck getNotInCheck() {
return notInCheck;
}
-
+
private void rewriteCorrConjunctForHaving(ASTNode conjunctASTNode,
boolean refersLeft,
String outerQueryAlias,
RowResolver outerQueryRR,
ColumnInfo outerQueryCol) {
-
+
String newColAlias = "_gby_sq_col_" + numOuterCorrExprsForHaving++;
ASTNode outerExprForCorr = SubQueryUtils.createColRefAST(outerQueryAlias, newColAlias);
if ( refersLeft ) {
@@ -956,5 +956,5 @@ public class QBSubQuery implements ISubQ
}
outerQueryRR.put(outerQueryAlias, newColAlias, outerQueryCol);
}
-
+
}
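The NotInCheck comment earlier in this file compresses a real subtlety: x NOT IN (subquery) means x <> ALL (subquery), and a comparison with NULL yields NULL rather than true or false, so a single NULL in the subquery's result keeps the predicate from ever being true. That is why the rewrite joins in a count-of-nulls-is-zero guard. A small simulation of the three-valued logic, using a Java Boolean whose null stands in for SQL NULL (NotInNullSketch is illustrative, not Hive code):

import java.util.Arrays;
import java.util.List;

public class NotInNullSketch {

  // Comparison under SQL three-valued logic: a NULL operand gives a NULL result.
  static Boolean notEqual(Long a, Long b) {
    if (a == null || b == null) {
      return null;
    }
    return Boolean.valueOf(!a.equals(b));
  }

  // x NOT IN (set) is x <> ALL (set): FALSE on any match, NULL if a NULL
  // member was seen and no match was found, TRUE otherwise.
  static Boolean notIn(Long x, List<Long> set) {
    Boolean result = Boolean.TRUE;
    for (Long member : set) {
      Boolean ne = notEqual(x, member);
      if (Boolean.FALSE.equals(ne)) {
        return Boolean.FALSE;          // a match: definitely "in"
      }
      if (ne == null) {
        result = null;                 // NULL poisons the conjunction
      }
    }
    return result;
  }

  public static void main(String[] args) {
    System.out.println(notIn(3L, Arrays.asList(1L, 2L)));        // true
    System.out.println(notIn(1L, Arrays.asList(1L, 2L)));        // false
    System.out.println(notIn(3L, Arrays.asList(null, 1L, 2L)));  // null: never satisfies a WHERE
  }
}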
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Sep 2 16:30:33 2014
@@ -98,7 +98,7 @@ public class RowResolver implements Seri
public void put(String tab_alias, String col_alias, ColumnInfo colInfo) {
if (!addMappingOnly(tab_alias, col_alias, colInfo)) {
- rowSchema.getSignature().add(colInfo);
+ rowSchema.getSignature().add(colInfo);
}
}
@@ -289,7 +289,7 @@ public class RowResolver implements Seri
public boolean getIsExprResolver() {
return isExprResolver;
}
-
+
public String[] getAlternateMappings(String internalName) {
return altInvRslvMap.get(internalName);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Sep 2 16:30:33 2014
@@ -2239,8 +2239,8 @@ public class SemanticAnalyzer extends Ba
String havingInputAlias = null;
if ( forHavingClause ) {
- havingInputAlias = "gby_sq" + sqIdx;
- aliasToOpInfo.put(havingInputAlias, input);
+ havingInputAlias = "gby_sq" + sqIdx;
+ aliasToOpInfo.put(havingInputAlias, input);
}
subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias, aliasToOpInfo.keySet());
@@ -11589,40 +11589,40 @@ public class SemanticAnalyzer extends Ba
}
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
- Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
- if ( gByExpr.getType() == HiveParser.DOT
+ Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
+ if ( gByExpr.getType() == HiveParser.DOT
&& gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL ) {
- String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
- .getChild(0).getChild(0).getText());
- String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
- gByExpr.getChild(1).getText());
- gByRR.put(tab_alias, col_alias, colInfo);
- } else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
- String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
- .getChild(0).getText());
- String tab_alias = null;
- /*
- * If the input to the GBy has a tab alias for the column, then add an entry
- * based on that tab_alias.
- * For e.g. this query:
- * select b.x, count(*) from t1 b group by x
- * needs (tab_alias=b, col_alias=x) in the GBy RR.
- * tab_alias=b comes from looking at the RowResolver that is the ancestor
- * before any GBy/ReduceSinks added for the GBY operation.
- */
- Operator<? extends OperatorDesc> parent = reduceSinkOp;
- while ( parent instanceof ReduceSinkOperator ||
- parent instanceof GroupByOperator ) {
- parent = parent.getParentOperators().get(0);
- }
- RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
- try {
- ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
- tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
- } catch(SemanticException se) {
- }
- gByRR.put(tab_alias, col_alias, colInfo);
- }
+ String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
+ .getChild(0).getChild(0).getText());
+ String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
+ gByExpr.getChild(1).getText());
+ gByRR.put(tab_alias, col_alias, colInfo);
+ } else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
+ String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
+ .getChild(0).getText());
+ String tab_alias = null;
+ /*
+ * If the input to the GBy has a tab alias for the column, then add an entry
+ * based on that tab_alias.
+ * For e.g. this query:
+ * select b.x, count(*) from t1 b group by x
+ * needs (tab_alias=b, col_alias=x) in the GBy RR.
+ * tab_alias=b comes from looking at the RowResolver that is the ancestor
+ * before any GBy/ReduceSinks added for the GBY operation.
+ */
+ Operator<? extends OperatorDesc> parent = reduceSinkOp;
+ while ( parent instanceof ReduceSinkOperator ||
+ parent instanceof GroupByOperator ) {
+ parent = parent.getParentOperators().get(0);
+ }
+ RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
+ try {
+ ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
+ tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
+ } catch(SemanticException se) {
+ }
+ gByRR.put(tab_alias, col_alias, colInfo);
+ }
}
private WriteEntity.WriteType determineWriteType(LoadTableDesc ltd, boolean isNonNativeTable) {
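
The addAlternateGByKeyMappings block above is re-indented wholesale, which makes the logic easy to miss: for a query like select b.x, count(*) from t1 b group by x, the GROUP BY RowResolver needs the (tab_alias=b, col_alias=x) entry, and the table alias is recovered by walking above the ReduceSink/GroupBy operators that GROUP BY planning inserted. A self-contained toy of that ancestor walk, with hypothetical names standing in for the Hive operator classes:

import java.util.List;

public final class AncestorWalkSketch {
  static final class Node {
    final boolean synthetic;   // stands in for ReduceSinkOperator / GroupByOperator
    final List<Node> parents;
    final String tabAlias;     // stands in for the ancestor RowResolver lookup
    Node(boolean synthetic, List<Node> parents, String tabAlias) {
      this.synthetic = synthetic;
      this.parents = parents;
      this.tabAlias = tabAlias;
    }
  }

  static String originalTabAlias(Node op) {
    // Climb past the operators added for the GROUP BY; they form a
    // single-parent chain, so following parents.get(0) is enough.
    while (op.synthetic) {
      op = op.parents.get(0);
    }
    return op.tabAlias;
  }
}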
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java Tue Sep 2 16:30:33 2014
@@ -37,7 +37,7 @@ public class LoadFileDesc extends LoadDe
private String destinationCreateTable;
static {
- PTFUtils.makeTransient(LoadFileDesc.class, "targetDir");
+ PTFUtils.makeTransient(LoadFileDesc.class, "targetDir");
}
public LoadFileDesc() {
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java Tue Sep 2 16:30:33 2014
@@ -38,7 +38,7 @@ public class LoadMultiFilesDesc implemen
private transient List<Path> srcDirs;
static {
- PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
+ PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
}
public LoadMultiFilesDesc() {
}
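
The two hunks above only re-indent the static initializers that register targetDir and targetDirs with PTFUtils.makeTransient, keeping those fields out of the serialized query plan. As a minimal sketch of one way such a helper can work -- an assumption on my part, not a claim about the real PTFUtils -- the standard java.beans route is to flag the property as transient so XMLEncoder skips it:

import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;

public final class TransientSketch {
  static void makeTransient(Class<?> beanClass, String propertyName) {
    try {
      BeanInfo info = Introspector.getBeanInfo(beanClass);
      for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
        if (pd.getName().equals(propertyName)) {
          // XMLEncoder's DefaultPersistenceDelegate honors this attribute
          // and omits the property when writing the bean.
          pd.setValue("transient", Boolean.TRUE);
        }
      }
    } catch (IntrospectionException ie) {
      throw new RuntimeException(ie);
    }
  }
}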
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java Tue Sep 2 16:30:33 2014
@@ -60,8 +60,8 @@ public class CounterStatsAggregator impl
@Override
public String aggregateStats(String counterGrpName, String statType) {
// In case of counters, aggregation is done by JobTracker / MR AM itself
- // so no need to aggregate, simply return the counter value for requested stat.
- return String.valueOf(counters.getGroup(counterGrpName).getCounter(statType));
+ // so no need to aggregate, simply return the counter value for requested stat.
+ return String.valueOf(counters.getGroup(counterGrpName).getCounter(statType));
}
@Override
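
The reworded comment above is the whole story for counter-based stats: the MR framework has already summed per-task counters into job-wide totals, so publishing a stat is a single lookup rather than an aggregation pass. A standalone restatement using the same mapred Counters API the hunk calls (the wrapper class and method names are mine):

import org.apache.hadoop.mapred.Counters;

public final class CounterStatSketch {
  static String aggregate(Counters counters, String counterGrpName, String statType) {
    // getGroup(...).getCounter(...) returns the total that the JobTracker /
    // MR ApplicationMaster accumulated across all tasks of the job.
    return String.valueOf(counters.getGroup(counterGrpName).getCounter(statType));
  }
}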
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java Tue Sep 2 16:30:33 2014
@@ -34,43 +34,38 @@ import org.apache.hadoop.io.IntWritable;
@WindowFunctionDescription
(
- description = @Description(
- name = "cume_dist",
- value = "_FUNC_(x) - The CUME_DIST function (defined as the inverse of percentile in some " +
- "statistical books) computes the position of a specified value relative to a set of values. " +
- "To compute the CUME_DIST of a value x in a set S of size N, you use the formula: " +
- "CUME_DIST(x) = number of values in S coming before " +
- " and including x in the specified order/ N"
- ),
- supportsWindow = false,
- pivotResult = true,
- rankingFunction = true,
- impliesOrder = true
+ description = @Description(
+ name = "cume_dist",
+ value = "_FUNC_(x) - The CUME_DIST function (defined as the inverse of percentile in some " +
+ "statistical books) computes the position of a specified value relative to a set of values. " +
+ "To compute the CUME_DIST of a value x in a set S of size N, you use the formula: " +
+ "CUME_DIST(x) = number of values in S coming before " +
+ " and including x in the specified order/ N"
+ ),
+ supportsWindow = false,
+ pivotResult = true,
+ rankingFunction = true,
+ impliesOrder = true
)
-public class GenericUDAFCumeDist extends GenericUDAFRank
-{
+public class GenericUDAFCumeDist extends GenericUDAFRank {
- static final Log LOG = LogFactory.getLog(GenericUDAFCumeDist.class.getName());
+ static final Log LOG = LogFactory.getLog(GenericUDAFCumeDist.class.getName());
- @Override
- protected GenericUDAFAbstractRankEvaluator createEvaluator()
- {
- return new GenericUDAFCumeDistEvaluator();
- }
+ @Override
+ protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+ return new GenericUDAFCumeDistEvaluator();
+ }
- public static class GenericUDAFCumeDistEvaluator extends GenericUDAFAbstractRankEvaluator
- {
+ public static class GenericUDAFCumeDistEvaluator extends GenericUDAFAbstractRankEvaluator {
@Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
- {
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
super.init(m, parameters);
return ObjectInspectorFactory
.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
}
@Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
+ public Object terminate(AggregationBuffer agg) throws HiveException {
List<IntWritable> ranks = ((RankBuffer) agg).rowNums;
int ranksSize = ranks.size();
double ranksSizeDouble = ranksSize;
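
Beyond the brace-style cleanup, the @Description above states the formula: CUME_DIST(x) = (number of values in S coming before and including x in the specified order) / N, which terminate() realizes by dividing rank positions by the partition size. A standalone illustration of just that arithmetic (not the Hive evaluator):

import java.util.Arrays;

public final class CumeDistSketch {
  static double cumeDist(double[] sorted, double x) {
    // Count the values at or before x in the ordering, then divide by N.
    long atOrBefore = Arrays.stream(sorted).filter(v -> v <= x).count();
    return (double) atOrBefore / sorted.length;
  }

  public static void main(String[] args) {
    // For S = {3, 5, 5, 9}: cume_dist(5) = 3/4 = 0.75
    System.out.println(cumeDist(new double[] {3, 5, 5, 9}, 5));
  }
}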
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java Tue Sep 2 16:30:33 2014
@@ -23,41 +23,38 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "dense_rank",
- value = "_FUNC_(x) The difference between RANK and DENSE_RANK is that DENSE_RANK leaves no " +
- "gaps in ranking sequence when there are ties. That is, if you were " +
- "ranking a competition using DENSE_RANK and had three people tie for " +
- "second place, you would say that all three were in second place and " +
- "that the next person came in third."
- ),
- supportsWindow = false,
- pivotResult = true,
- rankingFunction = true,
- impliesOrder = true
+@WindowFunctionDescription(
+ description = @Description(
+ name = "dense_rank",
+ value = "_FUNC_(x) The difference between RANK and DENSE_RANK is that DENSE_RANK leaves no " +
+ "gaps in ranking sequence when there are ties. That is, if you were " +
+ "ranking a competition using DENSE_RANK and had three people tie for " +
+ "second place, you would say that all three were in second place and " +
+ "that the next person came in third."
+ ),
+ supportsWindow = false,
+ pivotResult = true,
+ rankingFunction = true,
+ impliesOrder = true
)
-public class GenericUDAFDenseRank extends GenericUDAFRank
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFDenseRank.class.getName());
-
- @Override
- protected GenericUDAFAbstractRankEvaluator createEvaluator()
- {
- return new GenericUDAFDenseRankEvaluator();
- }
-
- public static class GenericUDAFDenseRankEvaluator extends GenericUDAFRankEvaluator
- {
- /*
- * Called when the value in the partition has changed. Update the currentRank
- */
- @Override
- protected void nextRank(RankBuffer rb)
- {
- rb.currentRank++;
- }
- }
+public class GenericUDAFDenseRank extends GenericUDAFRank {
+
+ static final Log LOG = LogFactory.getLog(GenericUDAFDenseRank.class.getName());
+
+ @Override
+ protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+ return new GenericUDAFDenseRankEvaluator();
+ }
+
+ public static class GenericUDAFDenseRankEvaluator extends GenericUDAFRankEvaluator {
+
+ /*
+ * Called when the value in the partition has changed. Update the currentRank
+ */
+ @Override
+ protected void nextRank(RankBuffer rb) {
+ rb.currentRank++;
+ }
+ }
}
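
The reformatted description and evaluator above capture the one behavioral difference from RANK: nextRank() bumps rb.currentRank by exactly one whenever the value changes, so ties never create gaps. For the ordered values {10, 10, 20}, RANK yields 1, 1, 3 while DENSE_RANK yields 1, 1, 2. A standalone sketch of that no-gaps rule (illustrative only, not the Hive evaluator):

public final class DenseRankSketch {
  static int[] denseRanks(int[] sortedVals) {
    int[] out = new int[sortedVals.length];
    int rank = 0;
    for (int i = 0; i < sortedVals.length; i++) {
      if (i == 0 || sortedVals[i] != sortedVals[i - 1]) {
        rank++; // value changed: advance by exactly one, never skipping ranks
      }
      out[i] = rank; // ties repeat the current rank
    }
    return out;
  }
}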
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java?rev=1622060&r1=1622059&r2=1622060&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java Tue Sep 2 16:30:33 2014
@@ -41,147 +41,128 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "first_value",
- value = "_FUNC_(x)"
- ),
- supportsWindow = true,
- pivotResult = false,
- impliesOrder = true
+@WindowFunctionDescription(
+ description = @Description(
+ name = "first_value",
+ value = "_FUNC_(x)"
+ ),
+ supportsWindow = true,
+ pivotResult = false,
+ impliesOrder = true
)
-public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName());
-
- @Override
- public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
- {
- if (parameters.length > 2)
- {
- throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
- }
- if ( parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo) )
- {
- throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
- }
- return createEvaluator();
- }
-
- protected GenericUDAFFirstValueEvaluator createEvaluator()
- {
- return new GenericUDAFFirstValueEvaluator();
- }
-
- static class FirstValueBuffer implements AggregationBuffer
- {
- Object val;
- boolean valSet;
- boolean firstRow;
- boolean skipNulls;
-
- FirstValueBuffer()
- {
- init();
- }
-
- void init()
- {
- val = null;
- valSet = false;
- firstRow = true;
- skipNulls = false;
- }
-
- }
-
- public static class GenericUDAFFirstValueEvaluator extends GenericUDAFEvaluator
- {
- ObjectInspector inputOI;
- ObjectInspector outputOI;
-
- @Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
- {
- super.init(m, parameters);
- if (m != Mode.COMPLETE)
- {
- throw new HiveException(
- "Only COMPLETE mode supported for Rank function");
- }
- inputOI = parameters[0];
- outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI, ObjectInspectorCopyOption.WRITABLE);
- return outputOI;
- }
-
- @Override
- public AggregationBuffer getNewAggregationBuffer() throws HiveException
- {
- return new FirstValueBuffer();
- }
-
- @Override
- public void reset(AggregationBuffer agg) throws HiveException
- {
- ((FirstValueBuffer) agg).init();
- }
-
- @Override
- public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
- {
- FirstValueBuffer fb = (FirstValueBuffer) agg;
-
- if (fb.firstRow )
- {
- fb.firstRow = false;
- if ( parameters.length == 2 )
- {
- fb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(
- parameters[1],
- PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
- }
- }
-
- if ( !fb.valSet )
- {
- fb.val = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI, ObjectInspectorCopyOption.WRITABLE);
- if ( !fb.skipNulls || fb.val != null )
- {
- fb.valSet = true;
- }
- }
- }
-
- @Override
- public Object terminatePartial(AggregationBuffer agg) throws HiveException
- {
- throw new HiveException("terminatePartial not supported");
- }
-
- @Override
- public void merge(AggregationBuffer agg, Object partial) throws HiveException
- {
- throw new HiveException("merge not supported");
- }
-
- @Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
- return ((FirstValueBuffer) agg).val;
- }
-
+public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver {
+
+ static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName());
+
+ @Override
+ public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+ if (parameters.length > 2) {
+ throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
+ }
+ if (parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo)) {
+ throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
+ }
+ return createEvaluator();
+ }
+
+ protected GenericUDAFFirstValueEvaluator createEvaluator() {
+ return new GenericUDAFFirstValueEvaluator();
+ }
+
+ static class FirstValueBuffer implements AggregationBuffer {
+
+ Object val;
+ boolean valSet;
+ boolean firstRow;
+ boolean skipNulls;
+
+ FirstValueBuffer() {
+ init();
+ }
+
+ void init() {
+ val = null;
+ valSet = false;
+ firstRow = true;
+ skipNulls = false;
+ }
+
+ }
+
+ public static class GenericUDAFFirstValueEvaluator extends GenericUDAFEvaluator {
+
+ ObjectInspector inputOI;
+ ObjectInspector outputOI;
+
+ @Override
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+ super.init(m, parameters);
+ if (m != Mode.COMPLETE) {
+ throw new HiveException("Only COMPLETE mode supported for Rank function");
+ }
+ inputOI = parameters[0];
+ outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI,
+ ObjectInspectorCopyOption.WRITABLE);
+ return outputOI;
+ }
+
+ @Override
+ public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+ return new FirstValueBuffer();
+ }
+
+ @Override
+ public void reset(AggregationBuffer agg) throws HiveException {
+ ((FirstValueBuffer) agg).init();
+ }
+
+ @Override
+ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+ FirstValueBuffer fb = (FirstValueBuffer) agg;
+
+ if (fb.firstRow) {
+ fb.firstRow = false;
+ if (parameters.length == 2) {
+ fb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(parameters[1],
+ PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
+ }
+ }
+
+ if (!fb.valSet) {
+ fb.val = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI,
+ ObjectInspectorCopyOption.WRITABLE);
+ if (!fb.skipNulls || fb.val != null) {
+ fb.valSet = true;
+ }
+ }
+ }
+
+ @Override
+ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+ throw new HiveException("terminatePartial not supported");
+ }
+
+ @Override
+ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+ throw new HiveException("merge not supported");
+ }
+
+ @Override
+ public Object terminate(AggregationBuffer agg) throws HiveException {
+ return ((FirstValueBuffer) agg).val;
+ }
+
@Override
public GenericUDAFEvaluator getWindowingEvaluator(WindowFrameDef wFrmDef) {
BoundaryDef start = wFrmDef.getStart();
BoundaryDef end = wFrmDef.getEnd();
- return new FirstValStreamingFixedWindow(this, start.getAmt(),
- end.getAmt());
+ return new FirstValStreamingFixedWindow(this, start.getAmt(), end.getAmt());
}
- }
-
+ }
+
static class ValIndexPair {
+
Object val;
int idx;
@@ -191,16 +172,15 @@ public class GenericUDAFFirstValue exten
}
}
- static class FirstValStreamingFixedWindow extends
- GenericUDAFStreamingEvaluator<Object> {
+ static class FirstValStreamingFixedWindow extends GenericUDAFStreamingEvaluator<Object> {
class State extends GenericUDAFStreamingEvaluator<Object>.StreamingState {
+
private final Deque<ValIndexPair> valueChain;
public State(int numPreceding, int numFollowing, AggregationBuffer buf) {
super(numPreceding, numFollowing, buf);
- valueChain = new ArrayDeque<ValIndexPair>(numPreceding + numFollowing
- + 1);
+ valueChain = new ArrayDeque<ValIndexPair>(numPreceding + numFollowing + 1);
}
@Override
@@ -222,8 +202,8 @@ public class GenericUDAFFirstValue exten
*/
int wdwSz = numPreceding + numFollowing + 1;
- return underlying + (underlying * wdwSz) + (underlying * wdwSz)
- + (3 * JavaDataModel.PRIMITIVES1);
+ return underlying + (underlying * wdwSz) + (underlying * wdwSz) + (3
+ * JavaDataModel.PRIMITIVES1);
}
protected void reset() {
@@ -232,8 +212,8 @@ public class GenericUDAFFirstValue exten
}
}
- public FirstValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval,
- int numPreceding, int numFollowing) {
+ public FirstValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval, int numPreceding,
+ int numFollowing) {
super(wrappedEval, numPreceding, numFollowing);
}
@@ -253,8 +233,7 @@ public class GenericUDAFFirstValue exten
}
@Override
- public void iterate(AggregationBuffer agg, Object[] parameters)
- throws HiveException {
+ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
State s = (State) agg;
FirstValueBuffer fb = (FirstValueBuffer) s.wrappedBuf;
@@ -266,15 +245,14 @@ public class GenericUDAFFirstValue exten
wrappedEval.iterate(fb, parameters);
}
- Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0],
- inputOI(), ObjectInspectorCopyOption.WRITABLE);
+ Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI(),
+ ObjectInspectorCopyOption.WRITABLE);
/*
* add row to chain. except in case of UNB preceding: - only 1 firstVal
* needs to be tracked.
*/
- if (s.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT
- || s.valueChain.isEmpty()) {
+ if (s.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT || s.valueChain.isEmpty()) {
/*
* add value to chain if it is not null or if skipNulls is false.
*/
@@ -309,8 +287,7 @@ public class GenericUDAFFirstValue exten
public Object terminate(AggregationBuffer agg) throws HiveException {
State s = (State) agg;
FirstValueBuffer fb = (FirstValueBuffer) s.wrappedBuf;
- ValIndexPair r = fb.skipNulls && s.valueChain.size() == 0 ? null
- : s.valueChain.getFirst();
+ ValIndexPair r = fb.skipNulls && s.valueChain.size() == 0 ? null : s.valueChain.getFirst();
for (int i = 0; i < s.numFollowing; i++) {
s.results.add(r == null ? null : r.val);