Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/04 04:49:50 UTC
svn commit: r1622396 [6/8] - in /hive/branches/cbo: ./
accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/
beeline/src/java/org/apache/hive/beeline/
beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/
common/src/java/or...
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java Thu Sep 4 02:49:46 2014
@@ -38,144 +38,129 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "rank",
- value = "_FUNC_(x) NTILE allows easy calculation of tertiles, quartiles, deciles and other " +
- "common summary statistics. This function divides an ordered partition into a specified " +
- "number of groups called buckets and assigns a bucket number to each row in the partition."
- ),
- supportsWindow = false,
- pivotResult = true
+@WindowFunctionDescription(
+ description = @Description(
+ name = "rank",
+ value = "_FUNC_(x) NTILE allows easy calculation of tertiles, quartiles, deciles and other "
+ +"common summary statistics. This function divides an ordered partition into a "
+ + "specified number of groups called buckets and assigns a bucket number to each row "
+ + "in the partition."
+ ),
+ supportsWindow = false,
+ pivotResult = true
)
-public class GenericUDAFNTile extends AbstractGenericUDAFResolver
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName());
-
- @Override
- public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
- {
- if (parameters.length != 1)
- {
- throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
- }
- ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
-
- boolean c = ObjectInspectorUtils.compareTypes(oi, PrimitiveObjectInspectorFactory.writableIntObjectInspector);
- if (!c)
- {
- throw new UDFArgumentTypeException(0, "Number of tiles must be an int expression");
- }
-
- return new GenericUDAFNTileEvaluator();
- }
-
- static class NTileBuffer implements AggregationBuffer
- {
- Integer numBuckets;
- int numRows;
-
- void init()
- {
- numBuckets = null;
- numRows = 0;
- }
-
- NTileBuffer()
- {
- init();
- }
- }
-
- public static class GenericUDAFNTileEvaluator extends GenericUDAFEvaluator
- {
- private transient PrimitiveObjectInspector inputOI;
-
- @Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
- {
- assert (parameters.length == 1);
- super.init(m, parameters);
- if (m != Mode.COMPLETE)
- {
- throw new HiveException(
- "Only COMPLETE mode supported for NTile function");
- }
- inputOI = (PrimitiveObjectInspector) parameters[0];
- return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
- }
-
- @Override
- public AggregationBuffer getNewAggregationBuffer() throws HiveException
- {
- return new NTileBuffer();
- }
-
- @Override
- public void reset(AggregationBuffer agg) throws HiveException
- {
- ((NTileBuffer) agg).init();
- }
-
- @Override
- public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
- {
- NTileBuffer rb = (NTileBuffer) agg;
- if ( rb.numBuckets == null)
- {
- rb.numBuckets = PrimitiveObjectInspectorUtils.getInt(parameters[0], inputOI);
- }
- rb.numRows++;
- }
-
- @Override
- public Object terminatePartial(AggregationBuffer agg) throws HiveException
- {
- throw new HiveException("terminatePartial not supported");
- }
-
- @Override
- public void merge(AggregationBuffer agg, Object partial) throws HiveException
- {
- throw new HiveException("merge not supported");
- }
-
- @Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
- NTileBuffer rb = (NTileBuffer) agg;
- ArrayList<IntWritable> res = new ArrayList<IntWritable>(rb.numRows);
-
- /*
- * If there is a remainder from numRows/numBuckets, distribute it by increasing the size of the first 'rem' buckets by 1.
- */
-
- int bucketsz = rb.numRows / rb.numBuckets;
- int rem = rb.numRows % rb.numBuckets;
- int start = 0;
- int bucket = 1;
- while ( start < rb.numRows)
- {
- int end = start + bucketsz;
- if (rem > 0)
- {
- end++; rem--;
- }
- end = Math.min(rb.numRows, end);
- for(int i = start; i < end; i++)
- {
- res.add(new IntWritable(bucket));
- }
- start = end;
- bucket++;
- }
+public class GenericUDAFNTile extends AbstractGenericUDAFResolver {
- return res;
- }
+ static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName());
- }
+ @Override
+ public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+ if (parameters.length != 1) {
+ throw new UDFArgumentTypeException(parameters.length - 1,
+ "Exactly one argument is expected.");
+ }
+ ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
+
+ boolean c = ObjectInspectorUtils.compareTypes(oi,
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ if (!c) {
+ throw new UDFArgumentTypeException(0, "Number of tiles must be an int expression");
+ }
+
+ return new GenericUDAFNTileEvaluator();
+ }
+
+ static class NTileBuffer implements AggregationBuffer {
+
+ Integer numBuckets;
+ int numRows;
+
+ void init() {
+ numBuckets = null;
+ numRows = 0;
+ }
+
+ NTileBuffer() {
+ init();
+ }
+ }
+
+ public static class GenericUDAFNTileEvaluator extends GenericUDAFEvaluator {
+
+ private transient PrimitiveObjectInspector inputOI;
+
+ @Override
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+ assert (parameters.length == 1);
+ super.init(m, parameters);
+ if (m != Mode.COMPLETE) {
+ throw new HiveException("Only COMPLETE mode supported for NTile function");
+ }
+ inputOI = (PrimitiveObjectInspector) parameters[0];
+ return ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ }
+
+ @Override
+ public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+ return new NTileBuffer();
+ }
+
+ @Override
+ public void reset(AggregationBuffer agg) throws HiveException {
+ ((NTileBuffer) agg).init();
+ }
+
+ @Override
+ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+ NTileBuffer rb = (NTileBuffer) agg;
+ if (rb.numBuckets == null) {
+ rb.numBuckets = PrimitiveObjectInspectorUtils.getInt(parameters[0], inputOI);
+ }
+ rb.numRows++;
+ }
+
+ @Override
+ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+ throw new HiveException("terminatePartial not supported");
+ }
+
+ @Override
+ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+ throw new HiveException("merge not supported");
+ }
+
+ @Override
+ public Object terminate(AggregationBuffer agg) throws HiveException {
+ NTileBuffer rb = (NTileBuffer) agg;
+ ArrayList<IntWritable> res = new ArrayList<IntWritable>(rb.numRows);
+
+ /*
+ * If there is a remainder from numRows/numBuckets, distribute it by increasing the size of the first 'rem' buckets by 1.
+ */
+
+ int bucketsz = rb.numRows / rb.numBuckets;
+ int rem = rb.numRows % rb.numBuckets;
+ int start = 0;
+ int bucket = 1;
+ while (start < rb.numRows) {
+ int end = start + bucketsz;
+ if (rem > 0) {
+ end++;
+ rem--;
+ }
+ end = Math.min(rb.numRows, end);
+ for (int i = start; i < end; i++) {
+ res.add(new IntWritable(bucket));
+ }
+ start = end;
+ bucket++;
+ }
+ return res;
+ }
+
+ }
}
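
For context, the bucket-assignment loop in terminate() above can be read in isolation. Below is a minimal standalone sketch of it (the class name NTileSketch and the main() driver are illustrative, not part of this commit): numRows rows are split into numBuckets groups, and the first (numRows % numBuckets) buckets each absorb one extra row.

import java.util.ArrayList;
import java.util.List;

public class NTileSketch {
  // Mirrors GenericUDAFNTileEvaluator.terminate(): rows get bucket numbers
  // 1..numBuckets; the first (numRows % numBuckets) buckets are one row
  // larger than the rest.
  static List<Integer> ntile(int numRows, int numBuckets) {
    List<Integer> res = new ArrayList<Integer>(numRows);
    int bucketsz = numRows / numBuckets;
    int rem = numRows % numBuckets;
    int start = 0;
    int bucket = 1;
    while (start < numRows) {
      int end = start + bucketsz;
      if (rem > 0) {
        end++;
        rem--;
      }
      end = Math.min(numRows, end);
      for (int i = start; i < end; i++) {
        res.add(bucket);
      }
      start = end;
      bucket++;
    }
    return res;
  }

  public static void main(String[] args) {
    // 10 rows into 4 buckets -> sizes 3,3,2,2: [1,1,1,2,2,2,3,3,4,4]
    System.out.println(ntile(10, 4));
  }
}
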
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java Thu Sep 4 02:49:46 2014
@@ -31,56 +31,52 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "percent_rank",
- value = "_FUNC_(x) PERCENT_RANK is similar to CUME_DIST, but it uses rank values rather " +
- "than row counts in its numerator. PERCENT_RANK of a row is calculated as: " +
- "(rank of row in its partition - 1) / (number of rows in the partition - 1)"
- ),
- supportsWindow = false,
- pivotResult = true,
- rankingFunction = true,
- impliesOrder = true
+@WindowFunctionDescription(
+ description = @Description(
+ name = "percent_rank",
+ value = "_FUNC_(x) PERCENT_RANK is similar to CUME_DIST, but it uses rank values rather " +
+ "than row counts in its numerator. PERCENT_RANK of a row is calculated as: " +
+ "(rank of row in its partition - 1) / (number of rows in the partition - 1)"
+ ),
+ supportsWindow = false,
+ pivotResult = true,
+ rankingFunction = true,
+ impliesOrder = true
)
-public class GenericUDAFPercentRank extends GenericUDAFRank
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName());
-
- @Override
- protected GenericUDAFAbstractRankEvaluator createEvaluator()
- {
- return new GenericUDAFPercentRankEvaluator();
- }
-
- public static class GenericUDAFPercentRankEvaluator extends GenericUDAFAbstractRankEvaluator
- {
- @Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
- {
- super.init(m, parameters);
- return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
- }
-
- @Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
- ArrayList<IntWritable> ranks = ((RankBuffer) agg).rowNums;
- double sz = ranks.size();
- if ( sz > 1 ) {
+public class GenericUDAFPercentRank extends GenericUDAFRank {
+
+ static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName());
+
+ @Override
+ protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+ return new GenericUDAFPercentRankEvaluator();
+ }
+
+ public static class GenericUDAFPercentRankEvaluator extends GenericUDAFAbstractRankEvaluator {
+
+ @Override
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+ super.init(m, parameters);
+ return ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+ }
+
+ @Override
+ public Object terminate(AggregationBuffer agg) throws HiveException {
+ ArrayList<IntWritable> ranks = ((RankBuffer) agg).rowNums;
+ double sz = ranks.size();
+ if (sz > 1) {
sz = sz - 1;
}
- ArrayList<DoubleWritable> pranks = new ArrayList<DoubleWritable>(ranks.size());
+ ArrayList<DoubleWritable> pranks = new ArrayList<DoubleWritable>(ranks.size());
+
+ for (IntWritable i : ranks) {
+ double pr = ((double) i.get() - 1) / sz;
+ pranks.add(new DoubleWritable(pr));
+ }
- for(IntWritable i : ranks)
- {
- double pr = ((double)i.get() - 1)/sz;
- pranks.add(new DoubleWritable(pr));
- }
-
- return pranks;
- }
- }
+ return pranks;
+ }
+ }
}
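
As a quick check of the formula in terminate() above, here is a standalone sketch (PercentRankSketch is illustrative, not part of this commit) of PERCENT_RANK = (rank - 1) / (numRows - 1), where the sz > 1 guard keeps a single-row partition from dividing by zero:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PercentRankSketch {
  // Mirrors GenericUDAFPercentRankEvaluator.terminate(): convert per-row
  // ranks into (rank - 1) / (partitionSize - 1).
  static List<Double> percentRank(List<Integer> ranks) {
    double sz = ranks.size();
    if (sz > 1) {
      sz = sz - 1;  // guard: a single-row partition divides by 1, not 0
    }
    List<Double> pranks = new ArrayList<Double>(ranks.size());
    for (int r : ranks) {
      pranks.add((r - 1) / sz);
    }
    return pranks;
  }

  public static void main(String[] args) {
    // ranks 1,2,2,4 over 4 rows -> [0.0, 0.333..., 0.333..., 1.0]
    System.out.println(percentRank(Arrays.asList(1, 2, 2, 4)));
  }
}
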
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java Thu Sep 4 02:49:46 2014
@@ -38,170 +38,150 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "rank",
- value = "_FUNC_(x)"
- ),
- supportsWindow = false,
- pivotResult = true,
- rankingFunction = true,
- impliesOrder = true
-)
-public class GenericUDAFRank extends AbstractGenericUDAFResolver
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName());
-
- @Override
- public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
- {
- if (parameters.length < 1)
- {
- throw new UDFArgumentTypeException(parameters.length - 1, "One or more arguments are expected.");
- }
- for(int i=0; i<parameters.length; i++)
- {
- ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[i]);
- if (!ObjectInspectorUtils.compareSupported(oi))
- {
- throw new UDFArgumentTypeException(i,
- "Cannot support comparison of map<> type or complex type containing map<>.");
- }
- }
- return createEvaluator();
- }
-
- protected GenericUDAFAbstractRankEvaluator createEvaluator()
- {
- return new GenericUDAFRankEvaluator();
- }
-
- static class RankBuffer implements AggregationBuffer
- {
- ArrayList<IntWritable> rowNums;
- int currentRowNum;
- Object[] currVal;
- int currentRank;
- int numParams;
- boolean supportsStreaming;
-
- RankBuffer(int numParams, boolean supportsStreaming)
- {
- this.numParams = numParams;
- this.supportsStreaming = supportsStreaming;
- init();
- }
-
- void init()
- {
- rowNums = new ArrayList<IntWritable>();
- currentRowNum = 0;
- currentRank = 0;
- currVal = new Object[numParams];
- if ( supportsStreaming ) {
- /* initialize rowNums to have 1 row */
- rowNums.add(null);
- }
- }
-
- void incrRowNum() { currentRowNum++; }
-
- void addRank()
- {
- if ( supportsStreaming ) {
- rowNums.set(0, new IntWritable(currentRank));
- } else {
- rowNums.add(new IntWritable(currentRank));
- }
- }
- }
-
- public static abstract class GenericUDAFAbstractRankEvaluator extends GenericUDAFEvaluator
- {
- ObjectInspector[] inputOI;
- ObjectInspector[] outputOI;
- boolean isStreamingMode = false;
-
- protected boolean isStreaming() {
- return isStreamingMode;
- }
-
- @Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
- {
- super.init(m, parameters);
- if (m != Mode.COMPLETE)
- {
- throw new HiveException(
- "Only COMPLETE mode supported for Rank function");
- }
- inputOI = parameters;
- outputOI = new ObjectInspector[inputOI.length];
- for(int i=0; i < inputOI.length; i++)
- {
- outputOI[i] = ObjectInspectorUtils.getStandardObjectInspector(inputOI[i], ObjectInspectorCopyOption.JAVA);
- }
- return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
- }
-
- @Override
- public AggregationBuffer getNewAggregationBuffer() throws HiveException
- {
- return new RankBuffer(inputOI.length, isStreamingMode);
- }
-
- @Override
- public void reset(AggregationBuffer agg) throws HiveException
- {
- ((RankBuffer) agg).init();
- }
-
- @Override
- public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
- {
- RankBuffer rb = (RankBuffer) agg;
- int c = GenericUDAFRank.compare(rb.currVal, outputOI, parameters, inputOI);
- rb.incrRowNum();
- if ( rb.currentRowNum == 1 || c != 0 )
- {
- nextRank(rb);
- rb.currVal = GenericUDAFRank.copyToStandardObject(parameters, inputOI, ObjectInspectorCopyOption.JAVA);
- }
- rb.addRank();
- }
-
- /*
- * Called when the value in the partition has changed. Update the currentRank
- */
- protected void nextRank(RankBuffer rb)
- {
- rb.currentRank = rb.currentRowNum;
- }
-
- @Override
- public Object terminatePartial(AggregationBuffer agg) throws HiveException
- {
- throw new HiveException("terminatePartial not supported");
- }
-
- @Override
- public void merge(AggregationBuffer agg, Object partial) throws HiveException
- {
- throw new HiveException("merge not supported");
- }
-
- @Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
- return ((RankBuffer) agg).rowNums;
- }
-
- }
-
- public static class GenericUDAFRankEvaluator extends
- GenericUDAFAbstractRankEvaluator implements
- ISupportStreamingModeForWindowing {
+@WindowFunctionDescription(
+ description = @Description(
+ name = "rank",
+ value = "_FUNC_(x)"),
+ supportsWindow = false,
+ pivotResult = true,
+ rankingFunction = true,
+ impliesOrder = true)
+public class GenericUDAFRank extends AbstractGenericUDAFResolver {
+
+ static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName());
+
+ @Override
+ public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+ if (parameters.length < 1) {
+ throw new UDFArgumentTypeException(parameters.length - 1,
+ "One or more arguments are expected.");
+ }
+ for (int i = 0; i < parameters.length; i++) {
+ ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[i]);
+ if (!ObjectInspectorUtils.compareSupported(oi)) {
+ throw new UDFArgumentTypeException(i,
+ "Cannot support comparison of map<> type or complex type containing map<>.");
+ }
+ }
+ return createEvaluator();
+ }
+
+ protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+ return new GenericUDAFRankEvaluator();
+ }
+
+ static class RankBuffer implements AggregationBuffer {
+
+ ArrayList<IntWritable> rowNums;
+ int currentRowNum;
+ Object[] currVal;
+ int currentRank;
+ int numParams;
+ boolean supportsStreaming;
+
+ RankBuffer(int numParams, boolean supportsStreaming) {
+ this.numParams = numParams;
+ this.supportsStreaming = supportsStreaming;
+ init();
+ }
+
+ void init() {
+ rowNums = new ArrayList<IntWritable>();
+ currentRowNum = 0;
+ currentRank = 0;
+ currVal = new Object[numParams];
+ if (supportsStreaming) {
+ /* initialize rowNums to have 1 row */
+ rowNums.add(null);
+ }
+ }
+
+ void incrRowNum() { currentRowNum++; }
+
+ void addRank() {
+ if (supportsStreaming) {
+ rowNums.set(0, new IntWritable(currentRank));
+ } else {
+ rowNums.add(new IntWritable(currentRank));
+ }
+ }
+ }
+
+ public static abstract class GenericUDAFAbstractRankEvaluator extends GenericUDAFEvaluator {
+
+ ObjectInspector[] inputOI;
+ ObjectInspector[] outputOI;
+ boolean isStreamingMode = false;
+
+ protected boolean isStreaming() {
+ return isStreamingMode;
+ }
+
+ @Override
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+ super.init(m, parameters);
+ if (m != Mode.COMPLETE) {
+ throw new HiveException("Only COMPLETE mode supported for Rank function");
+ }
+ inputOI = parameters;
+ outputOI = new ObjectInspector[inputOI.length];
+ for (int i = 0; i < inputOI.length; i++) {
+ outputOI[i] = ObjectInspectorUtils.getStandardObjectInspector(inputOI[i],
+ ObjectInspectorCopyOption.JAVA);
+ }
+ return ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ }
+
+ @Override
+ public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+ return new RankBuffer(inputOI.length, isStreamingMode);
+ }
+
+ @Override
+ public void reset(AggregationBuffer agg) throws HiveException {
+ ((RankBuffer) agg).init();
+ }
+
+ @Override
+ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+ RankBuffer rb = (RankBuffer) agg;
+ int c = GenericUDAFRank.compare(rb.currVal, outputOI, parameters, inputOI);
+ rb.incrRowNum();
+ if (rb.currentRowNum == 1 || c != 0) {
+ nextRank(rb);
+ rb.currVal =
+ GenericUDAFRank.copyToStandardObject(parameters, inputOI, ObjectInspectorCopyOption.JAVA);
+ }
+ rb.addRank();
+ }
+
+ /*
+ * Called when the value in the partition has changed. Update the currentRank
+ */
+ protected void nextRank(RankBuffer rb) {
+ rb.currentRank = rb.currentRowNum;
+ }
+
+ @Override
+ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+ throw new HiveException("terminatePartial not supported");
+ }
+
+ @Override
+ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+ throw new HiveException("merge not supported");
+ }
+
+ @Override
+ public Object terminate(AggregationBuffer agg) throws HiveException {
+ return ((RankBuffer) agg).rowNums;
+ }
+
+ }
+
+ public static class GenericUDAFRankEvaluator extends GenericUDAFAbstractRankEvaluator
+ implements ISupportStreamingModeForWindowing {
@Override
public Object getNextResult(AggregationBuffer agg) throws HiveException {
@@ -215,18 +195,15 @@ public class GenericUDAFRank extends Abs
}
@Override
- public int getRowsRemainingAfterTerminate()
- throws HiveException {
+ public int getRowsRemainingAfterTerminate() throws HiveException {
return 0;
}
}
public static int compare(Object[] o1, ObjectInspector[] oi1, Object[] o2,
- ObjectInspector[] oi2)
- {
+ ObjectInspector[] oi2) {
int c = 0;
- for (int i = 0; i < oi1.length; i++)
- {
+ for (int i = 0; i < oi1.length; i++) {
c = ObjectInspectorUtils.compare(o1[i], oi1[i], o2[i], oi2[i]);
if (c != 0) {
return c;
@@ -235,15 +212,11 @@ public class GenericUDAFRank extends Abs
return c;
}
- public static Object[] copyToStandardObject(Object[] o,
- ObjectInspector[] oi,
- ObjectInspectorCopyOption objectInspectorOption)
- {
+ public static Object[] copyToStandardObject(Object[] o, ObjectInspector[] oi,
+ ObjectInspectorCopyOption objectInspectorOption) {
Object[] out = new Object[o.length];
- for (int i = 0; i < oi.length; i++)
- {
- out[i] = ObjectInspectorUtils.copyToStandardObject(o[i], oi[i],
- objectInspectorOption);
+ for (int i = 0; i < oi.length; i++) {
+ out[i] = ObjectInspectorUtils.copyToStandardObject(o[i], oi[i], objectInspectorOption);
}
return out;
}
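
To make the tie handling in iterate()/nextRank() above concrete, a single-column sketch (RankSketch is illustrative, not part of this commit): equal ordering values share a rank, and after a tie the rank jumps to the current row number, so rank values can skip:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RankSketch {
  // Mirrors GenericUDAFRankEvaluator.iterate() for one ordering column:
  // the rank is updated to the row number only when the value changes.
  static List<Integer> rank(List<String> orderedValues) {
    List<Integer> rowNums = new ArrayList<Integer>();
    String currVal = null;
    int currentRowNum = 0;
    int currentRank = 0;
    for (String v : orderedValues) {
      currentRowNum++;
      if (currentRowNum == 1 || !v.equals(currVal)) {
        currentRank = currentRowNum;  // nextRank(): rank := row number
        currVal = v;
      }
      rowNums.add(currentRank);
    }
    return rowNums;
  }

  public static void main(String[] args) {
    // values a,b,b,c -> ranks [1, 2, 2, 4] (3 is skipped after the tie)
    System.out.println(rank(Arrays.asList("a", "b", "b", "c")));
  }
}
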
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java Thu Sep 4 02:49:46 2014
@@ -34,110 +34,89 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;
-@WindowFunctionDescription
-(
- description = @Description(
- name = "row_number",
- value = "_FUNC_() - The ROW_NUMBER function assigns a unique number (sequentially, starting from 1, as defined by ORDER BY) to each row within the partition."
- ),
- supportsWindow = false,
- pivotResult = true
+@WindowFunctionDescription(
+ description = @Description(
+ name = "row_number",
+ value = "_FUNC_() - The ROW_NUMBER function assigns a unique number (sequentially, starting "
+ + "from 1, as defined by ORDER BY) to each row within the partition."
+ ),
+ supportsWindow = false,
+ pivotResult = true
)
-public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver
-{
- static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName());
-
- @Override
- public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
- throws SemanticException
- {
- if (parameters.length != 0)
- {
- throw new UDFArgumentTypeException(parameters.length - 1,
- "No argument is expected.");
- }
- return new GenericUDAFRowNumberEvaluator();
- }
-
- static class RowNumberBuffer implements AggregationBuffer
- {
- ArrayList<IntWritable> rowNums;
- int nextRow;
-
- void init()
- {
- rowNums = new ArrayList<IntWritable>();
- }
-
- RowNumberBuffer()
- {
- init();
- nextRow = 1;
- }
-
- void incr()
- {
- rowNums.add(new IntWritable(nextRow++));
- }
- }
-
- public static class GenericUDAFRowNumberEvaluator extends
- GenericUDAFEvaluator
- {
-
- @Override
- public ObjectInspector init(Mode m, ObjectInspector[] parameters)
- throws HiveException
- {
- super.init(m, parameters);
- if (m != Mode.COMPLETE)
- {
- throw new HiveException("Only COMPLETE mode supported for row_number function");
- }
-
- return ObjectInspectorFactory.getStandardListObjectInspector(
- PrimitiveObjectInspectorFactory.writableIntObjectInspector);
- }
-
- @Override
- public AggregationBuffer getNewAggregationBuffer() throws HiveException
- {
- return new RowNumberBuffer();
- }
-
- @Override
- public void reset(AggregationBuffer agg) throws HiveException
- {
- ((RowNumberBuffer) agg).init();
- }
-
- @Override
- public void iterate(AggregationBuffer agg, Object[] parameters)
- throws HiveException
- {
- ((RowNumberBuffer) agg).incr();
- }
-
- @Override
- public Object terminatePartial(AggregationBuffer agg)
- throws HiveException
- {
- throw new HiveException("terminatePartial not supported");
- }
-
- @Override
- public void merge(AggregationBuffer agg, Object partial)
- throws HiveException
- {
- throw new HiveException("merge not supported");
- }
-
- @Override
- public Object terminate(AggregationBuffer agg) throws HiveException
- {
- return ((RowNumberBuffer) agg).rowNums;
- }
+public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver {
- }
+ static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName());
+
+ @Override
+ public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+ if (parameters.length != 0) {
+ throw new UDFArgumentTypeException(parameters.length - 1, "No argument is expected.");
+ }
+ return new GenericUDAFRowNumberEvaluator();
+ }
+
+ static class RowNumberBuffer implements AggregationBuffer {
+
+ ArrayList<IntWritable> rowNums;
+ int nextRow;
+
+ void init() {
+ rowNums = new ArrayList<IntWritable>();
+ }
+
+ RowNumberBuffer() {
+ init();
+ nextRow = 1;
+ }
+
+ void incr() {
+ rowNums.add(new IntWritable(nextRow++));
+ }
+ }
+
+ public static class GenericUDAFRowNumberEvaluator extends GenericUDAFEvaluator {
+
+ @Override
+ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+ super.init(m, parameters);
+ if (m != Mode.COMPLETE) {
+ throw new HiveException("Only COMPLETE mode supported for row_number function");
+ }
+
+ return ObjectInspectorFactory.getStandardListObjectInspector(
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ }
+
+ @Override
+ public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+ return new RowNumberBuffer();
+ }
+
+ @Override
+ public void reset(AggregationBuffer agg) throws HiveException {
+ ((RowNumberBuffer) agg).init();
+ }
+
+ @Override
+ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+ ((RowNumberBuffer) agg).incr();
+ }
+
+ @Override
+ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+ throw new HiveException("terminatePartial not supported");
+ }
+
+ @Override
+ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+ throw new HiveException("merge not supported");
+ }
+
+ @Override
+ public Object terminate(AggregationBuffer agg) throws HiveException {
+ return ((RowNumberBuffer) agg).rowNums;
+ }
+
+ }
}
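
For contrast with RANK above, a sketch of the ROW_NUMBER semantics (RowNumberSketch is illustrative, not part of this commit): every row gets a distinct, strictly increasing number, ties or not:

import java.util.ArrayList;
import java.util.List;

public class RowNumberSketch {
  // Mirrors RowNumberBuffer.incr(): a counter starting at 1 is appended
  // once per row, independent of the ordering values.
  static List<Integer> rowNumber(int numRows) {
    List<Integer> rowNums = new ArrayList<Integer>();
    int nextRow = 1;
    for (int i = 0; i < numRows; i++) {
      rowNums.add(nextRow++);
    }
    return rowNums;
  }

  public static void main(String[] args) {
    System.out.println(rowNumber(4));  // [1, 2, 3, 4]
  }
}
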
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java Thu Sep 4 02:49:46 2014
@@ -46,7 +46,7 @@ public abstract class GenericUDFBasePad
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 3) {
throw new UDFArgumentException(udfName + " requires three arguments. Found :"
- + arguments.length);
+ + arguments.length);
}
converter1 = checkTextArguments(arguments, 0);
converter2 = checkIntArguments(arguments, 1);
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java Thu Sep 4 02:49:46 2014
@@ -40,14 +40,14 @@ public abstract class GenericUDFBaseTrim
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 1) {
throw new UDFArgumentException(udfName + " requires one value argument. Found :"
- + arguments.length);
+ + arguments.length);
}
PrimitiveObjectInspector argumentOI;
if(arguments[0] instanceof PrimitiveObjectInspector) {
argumentOI = (PrimitiveObjectInspector) arguments[0];
} else {
throw new UDFArgumentException(udfName + " takes only primitive types. found "
- + arguments[0].getTypeName());
+ + arguments[0].getTypeName());
}
switch (argumentOI.getPrimitiveCategory()) {
case STRING:
@@ -56,7 +56,7 @@ public abstract class GenericUDFBaseTrim
break;
default:
throw new UDFArgumentException(udfName + " takes only STRING/CHAR/VARCHAR types. Found "
- + argumentOI.getPrimitiveCategory());
+ + argumentOI.getPrimitiveCategory());
}
converter = new TextConverter(argumentOI);
return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java Thu Sep 4 02:49:46 2014
@@ -51,7 +51,7 @@ public class GenericUDFBetween extends G
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
boolean invert = (Boolean) ((PrimitiveObjectInspector) argumentOIs[0])
- .getPrimitiveJavaObject(arguments[0].get());
+ .getPrimitiveJavaObject(arguments[0].get());
BooleanWritable left = ((BooleanWritable)egt.evaluate(new DeferredObject[] {arguments[1], arguments[2]}));
if (left == null) {
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java Thu Sep 4 02:49:46 2014
@@ -68,7 +68,7 @@ public class GenericUDFDateDiff extends
private transient PrimitiveCategory inputType1;
private transient PrimitiveCategory inputType2;
private IntWritable result = new IntWritable();
-
+
public GenericUDFDateDiff() {
formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
@@ -89,8 +89,8 @@ public class GenericUDFDateDiff extends
@Override
public IntWritable evaluate(DeferredObject[] arguments) throws HiveException {
- output = evaluate(convertToDate(inputType1, inputConverter1, arguments[0]),
- convertToDate(inputType2, inputConverter2, arguments[1]));
+ output = evaluate(convertToDate(inputType1, inputConverter1, arguments[0]),
+ convertToDate(inputType2, inputConverter2, arguments[1]));
return output;
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDecode.java Thu Sep 4 02:49:46 2014
@@ -44,8 +44,8 @@ import org.apache.hadoop.io.Text;
@Description(name = "decode",
value = "_FUNC_(bin, str) - Decode the first argument using the second argument character set",
extended = "Possible options for the character set are 'US_ASCII', 'ISO-8859-1',\n" +
- "'UTF-8', 'UTF-16BE', 'UTF-16LE', and 'UTF-16'. If either argument\n" +
- "is null, the result will also be null")
+ "'UTF-8', 'UTF-16BE', 'UTF-16LE', and 'UTF-16'. If either argument\n" +
+ "is null, the result will also be null")
public class GenericUDFDecode extends GenericUDF {
private transient CharsetDecoder decoder = null;
private transient BinaryObjectInspector bytesOI = null;
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLpad.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLpad.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLpad.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLpad.java Thu Sep 4 02:49:46 2014
@@ -46,7 +46,7 @@ public class GenericUDFLpad extends Gene
// Copy the padding
for (int i = 0; i < pos; i += pad.getLength()) {
for (int j = 0; j < pad.getLength() && j < pos - i; j++) {
- data[i + j] = padTxt[j];
+ data[i + j] = padTxt[j];
}
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRpad.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRpad.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRpad.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRpad.java Thu Sep 4 02:49:46 2014
@@ -48,7 +48,7 @@ public class GenericUDFRpad extends Gene
// Copy the padding
while (pos < len) {
for (int i = 0; i < pad.getLength() && i < len - pos; i++) {
- data[pos + i] = padTxt[i];
+ data[pos + i] = padTxt[i];
}
pos += pad.getLength();
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java Thu Sep 4 02:49:46 2014
@@ -59,7 +59,7 @@ public class GenericUDFToBinary extends
|| (argumentOI instanceof StringObjectInspector)
|| (argumentOI instanceof VoidObjectInspector))){
throw new UDFArgumentException("Only string, char, varchar or binary data can be cast into binary " +
- "data types.");
+ "data types.");
}
baConverter = new BinaryConverter(argumentOI,
PrimitiveObjectInspectorFactory.writableBinaryObjectInspector);
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java Thu Sep 4 02:49:46 2014
@@ -47,7 +47,7 @@ import org.codehaus.jackson.type.JavaTyp
*/
@Description(name = "json_tuple",
value = "_FUNC_(jsonStr, p1, p2, ..., pn) - like get_json_object, but it takes multiple names and return a tuple. " +
- "All the input parameters and output column types are string.")
+ "All the input parameters and output column types are string.")
public class GenericUDTFJSONTuple extends GenericUDTF {
@@ -106,7 +106,7 @@ public class GenericUDTFJSONTuple extend
if (numCols < 1) {
throw new UDFArgumentException("json_tuple() takes at least two arguments: " +
- "the json string and a path expression");
+ "the json string and a path expression");
}
for (int i = 0; i < args.length; ++i) {
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java Thu Sep 4 02:49:46 2014
@@ -86,7 +86,7 @@ public class GenericUDTFParseUrlTuple ex
if (numCols < 1) {
throw new UDFArgumentException("parse_url_tuple() takes at least two arguments: " +
- "the url string and a part name");
+ "the url string and a part name");
}
for (int i = 0; i < args.length; ++i) {
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/MatchPath.java Thu Sep 4 02:49:46 2014
@@ -225,7 +225,7 @@ public class MatchPath extends TableFunc
if ( symbolArgsSz % 2 != 0)
{
throwErrorWithSignature("Symbol Name, Expression need to be specified in pairs: " +
- "there are odd number of symbol args");
+ "there are odd number of symbol args");
}
evaluator.symInfo = new SymbolsInfo(symbolArgsSz/2);
@@ -253,7 +253,7 @@ public class MatchPath extends TableFunc
PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN )
{
throwErrorWithSignature(String.format("Currently a Symbol Expression(%s) " +
- "must be a boolean expression", symolExprArg.getExpressionTreeString()));
+ "must be a boolean expression", symolExprArg.getExpressionTreeString()));
}
evaluator.symInfo.add(symbolName, symolExprArg);
}
Modified: hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Thu Sep 4 02:49:46 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.orc.OrcFile.Version;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -1684,7 +1685,7 @@ public class TestOrcFile {
}
@Test
- public void testMemoryManagement() throws Exception {
+ public void testMemoryManagementV11() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector = ObjectInspectorFactory.getReflectionObjectInspector
@@ -1699,7 +1700,8 @@ public class TestOrcFile {
.stripeSize(50000)
.bufferSize(100)
.rowIndexStride(0)
- .memory(memory));
+ .memory(memory)
+ .version(Version.V_0_11));
assertEquals(testFilePath, memory.path);
for(int i=0; i < 2500; ++i) {
writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
@@ -1719,6 +1721,45 @@ public class TestOrcFile {
}
@Test
+ public void testMemoryManagementV12() throws Exception {
+ ObjectInspector inspector;
+ synchronized (TestOrcFile.class) {
+ inspector = ObjectInspectorFactory.getReflectionObjectInspector
+ (InnerStruct.class,
+ ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+ }
+ MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
+ Writer writer = OrcFile.createWriter(testFilePath,
+ OrcFile.writerOptions(conf)
+ .inspector(inspector)
+ .compress(CompressionKind.NONE)
+ .stripeSize(50000)
+ .bufferSize(100)
+ .rowIndexStride(0)
+ .memory(memory)
+ .version(Version.V_0_12));
+ assertEquals(testFilePath, memory.path);
+ for(int i=0; i < 2500; ++i) {
+ writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
+ }
+ writer.close();
+ assertEquals(null, memory.path);
+ Reader reader = OrcFile.createReader(testFilePath,
+ OrcFile.readerOptions(conf).filesystem(fs));
+ int i = 0;
+ for(StripeInformation stripe: reader.getStripes()) {
+ i += 1;
+ assertTrue("stripe " + i + " is too long at " + stripe.getDataLength(),
+ stripe.getDataLength() < 5000);
+ }
+ // with HIVE-7832, the dictionaries will be disabled after writing the first
+ // stripe as there are too many distinct values. Hence only 3 stripes as
+ // compared to 25 stripes in version 0.11 (above test case)
+ assertEquals(3, i);
+ assertEquals(2500, reader.getNumberOfRows());
+ }
+
+ @Test
public void testPredicatePushdown() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
Modified: hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java (original)
+++ hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java Thu Sep 4 02:49:46 2014
@@ -30,6 +30,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.TimeUnit;
/**
* Tests for the compactor Initiator thread.
@@ -89,7 +90,7 @@ public class TestInitiator extends Compa
txnHandler.findNextToCompact("nosuchhost-193892");
HiveConf conf = new HiveConf();
- HiveConf.setLongVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, 1L);
+ conf.setTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, 1L, TimeUnit.MILLISECONDS);
startInitiator(conf);
Modified: hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_create.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_create.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_create.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_create.q Thu Sep 4 02:49:46 2014
@@ -1 +1 @@
-create role PUBLIC;
+create role public;
Modified: hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_drop.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_drop.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_drop.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientnegative/authorization_public_drop.q Thu Sep 4 02:49:46 2014
@@ -1 +1 @@
-drop role PUBLIC;
+drop role public;
Modified: hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_grant_public.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_grant_public.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_grant_public.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_grant_public.q Thu Sep 4 02:49:46 2014
@@ -1 +1 @@
-grant role PUBLIC to user hive_test_user;
+grant role public to user hive_test_user;
Modified: hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_revoke_public.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_revoke_public.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_revoke_public.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientnegative/authorize_revoke_public.q Thu Sep 4 02:49:46 2014
@@ -1 +1 @@
-revoke role PUBLIC from user hive_test_user;
+revoke role public from user hive_test_user;
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_1.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_1.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_1.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_1.q Thu Sep 4 02:49:46 2014
@@ -57,33 +57,33 @@ show grant group hive_test_group1 on tab
show grant group hive_test_group1 on table src_autho_test(key);
--role
-create role src_role;
-grant role src_role to user hive_test_user;
+create role sRc_roLE;
+grant role sRc_roLE to user hive_test_user;
show role grant user hive_test_user;
--column grant to role
-grant select(key) on table src_autho_test to role src_role;
+grant select(key) on table src_autho_test to role sRc_roLE;
-show grant role src_role on table src_autho_test;
-show grant role src_role on table src_autho_test(key);
+show grant role sRc_roLE on table src_autho_test;
+show grant role sRc_roLE on table src_autho_test(key);
select key from src_autho_test order by key limit 20;
-revoke select(key) on table src_autho_test from role src_role;
+revoke select(key) on table src_autho_test from role sRc_roLE;
--table grant to role
-grant select on table src_autho_test to role src_role;
+grant select on table src_autho_test to role sRc_roLE;
select key from src_autho_test order by key limit 20;
-show grant role src_role on table src_autho_test;
-show grant role src_role on table src_autho_test(key);
-revoke select on table src_autho_test from role src_role;
+show grant role sRc_roLE on table src_autho_test;
+show grant role sRc_roLE on table src_autho_test(key);
+revoke select on table src_autho_test from role sRc_roLE;
-- drop role
-drop role src_role;
+drop role sRc_roLE;
set hive.security.authorization.enabled=false;
drop table src_autho_test;
\ No newline at end of file
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_5.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_5.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_5.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_5.q Thu Sep 4 02:49:46 2014
@@ -8,13 +8,13 @@ GRANT select ON DATABASE test_db TO USER
SHOW GRANT USER hive_test_user ON DATABASE test_db;
-CREATE ROLE db_test_role;
-GRANT ROLE db_test_role TO USER hive_test_user;
+CREATE ROLE db_TEST_Role;
+GRANT ROLE db_TEST_Role TO USER hive_test_user;
SHOW ROLE GRANT USER hive_test_user;
-GRANT drop ON DATABASE test_db TO ROLE db_test_role;
-GRANT select ON DATABASE test_db TO ROLE db_test_role;
+GRANT drop ON DATABASE test_db TO ROLE db_TEST_Role;
+GRANT select ON DATABASE test_db TO ROLE db_TEST_Role;
-SHOW GRANT ROLE db_test_role ON DATABASE test_db;
+SHOW GRANT ROLE db_TEST_Role ON DATABASE test_db;
DROP DATABASE IF EXISTS test_db;
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_grant_public_role.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_grant_public_role.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_grant_public_role.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_grant_public_role.q Thu Sep 4 02:49:46 2014
@@ -9,10 +9,10 @@ CREATE TABLE t_gpr1(i int);
-- all privileges should have been set for user
-GRANT ALL ON t_gpr1 TO ROLE public;
+GRANT ALL ON t_gpr1 TO ROLE pubLic;
SHOW GRANT USER user1 ON TABLE t_gpr1;
-SHOW GRANT ROLE public ON TABLE t_gpr1;
+SHOW GRANT ROLE pubLic ON TABLE t_gpr1;
set user.name=user2;
SHOW CURRENT ROLES;
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_role_grant2.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_role_grant2.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_role_grant2.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/authorization_role_grant2.q Thu Sep 4 02:49:46 2014
@@ -31,7 +31,7 @@ set user.name=user2;
grant src_role_wadmin to role sRc_role2;
set user.name=hive_admin_user;
-set role ADMIN;
+set role ADMIn;
grant src_role2 to user user3;
set user.name=user3;
Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q Thu Sep 4 02:49:46 2014
@@ -1,4 +1,4 @@
-set hive.stats.dbclass=counter;
+set hive.stats.dbclass=fs;
set hive.stats.autogather=true;
DROP TABLE lineitem;
Modified: hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_create.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_create.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_create.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_create.q.out Thu Sep 4 02:49:46 2014
@@ -1,3 +1,3 @@
-PREHOOK: query: create role PUBLIC
+PREHOOK: query: create role public
PREHOOK: type: CREATEROLE
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implictly exists. It can't be created.)
Modified: hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_drop.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_drop.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_drop.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientnegative/authorization_public_drop.q.out Thu Sep 4 02:49:46 2014
@@ -1,3 +1,3 @@
-PREHOOK: query: drop role PUBLIC
+PREHOOK: query: drop role public
PREHOOK: type: DROPROLE
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.)
Modified: hive/branches/cbo/ql/src/test/results/clientnegative/authorize_grant_public.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientnegative/authorize_grant_public.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientnegative/authorize_grant_public.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientnegative/authorize_grant_public.q.out Thu Sep 4 02:49:46 2014
@@ -1,3 +1,3 @@
-PREHOOK: query: grant role PUBLIC to user hive_test_user
+PREHOOK: query: grant role public to user hive_test_user
PREHOOK: type: GRANT_ROLE
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implictly belong to public role.)
Modified: hive/branches/cbo/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientnegative/authorize_revoke_public.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientnegative/authorize_revoke_public.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientnegative/authorize_revoke_public.q.out Thu Sep 4 02:49:46 2014
@@ -1,3 +1,3 @@
-PREHOOK: query: revoke role PUBLIC from user hive_test_user
+PREHOOK: query: revoke role public from user hive_test_user
PREHOOK: type: REVOKE_ROLE
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/authorization_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/authorization_1.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/authorization_1.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/authorization_1.q.out Thu Sep 4 02:49:46 2014
@@ -257,40 +257,40 @@ PREHOOK: type: SHOW_GRANT
POSTHOOK: query: show grant group hive_test_group1 on table src_autho_test(key)
POSTHOOK: type: SHOW_GRANT
PREHOOK: query: --role
-create role src_role
+create role sRc_roLE
PREHOOK: type: CREATEROLE
POSTHOOK: query: --role
-create role src_role
+create role sRc_roLE
POSTHOOK: type: CREATEROLE
-PREHOOK: query: grant role src_role to user hive_test_user
+PREHOOK: query: grant role sRc_roLE to user hive_test_user
PREHOOK: type: GRANT_ROLE
-POSTHOOK: query: grant role src_role to user hive_test_user
+POSTHOOK: query: grant role sRc_roLE to user hive_test_user
POSTHOOK: type: GRANT_ROLE
PREHOOK: query: show role grant user hive_test_user
PREHOOK: type: SHOW_ROLE_GRANT
POSTHOOK: query: show role grant user hive_test_user
POSTHOOK: type: SHOW_ROLE_GRANT
public false -1
-src_role false -1 hive_test_user
+sRc_roLE false -1 hive_test_user
PREHOOK: query: --column grant to role
-grant select(key) on table src_autho_test to role src_role
+grant select(key) on table src_autho_test to role sRc_roLE
PREHOOK: type: GRANT_PRIVILEGE
PREHOOK: Output: default@src_autho_test
POSTHOOK: query: --column grant to role
-grant select(key) on table src_autho_test to role src_role
+grant select(key) on table src_autho_test to role sRc_roLE
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@src_autho_test
-PREHOOK: query: show grant role src_role on table src_autho_test
+PREHOOK: query: show grant role sRc_roLE on table src_autho_test
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: show grant role src_role on table src_autho_test
+POSTHOOK: query: show grant role sRc_roLE on table src_autho_test
POSTHOOK: type: SHOW_GRANT
-PREHOOK: query: show grant role src_role on table src_autho_test(key)
+PREHOOK: query: show grant role sRc_roLE on table src_autho_test(key)
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: show grant role src_role on table src_autho_test(key)
+POSTHOOK: query: show grant role sRc_roLE on table src_autho_test(key)
POSTHOOK: type: SHOW_GRANT
-default src_autho_test [key] src_role ROLE SELECT false -1 hive_test_user
+default src_autho_test [key] sRc_roLE ROLE SELECT false -1 hive_test_user
PREHOOK: query: select key from src_autho_test order by key limit 20
PREHOOK: type: QUERY
PREHOOK: Input: default@src_autho_test
@@ -319,20 +319,20 @@ POSTHOOK: Input: default@src_autho_test
118
118
119
-PREHOOK: query: revoke select(key) on table src_autho_test from role src_role
+PREHOOK: query: revoke select(key) on table src_autho_test from role sRc_roLE
PREHOOK: type: REVOKE_PRIVILEGE
PREHOOK: Output: default@src_autho_test
-POSTHOOK: query: revoke select(key) on table src_autho_test from role src_role
+POSTHOOK: query: revoke select(key) on table src_autho_test from role sRc_roLE
POSTHOOK: type: REVOKE_PRIVILEGE
POSTHOOK: Output: default@src_autho_test
PREHOOK: query: --table grant to role
-grant select on table src_autho_test to role src_role
+grant select on table src_autho_test to role sRc_roLE
PREHOOK: type: GRANT_PRIVILEGE
PREHOOK: Output: default@src_autho_test
POSTHOOK: query: --table grant to role
-grant select on table src_autho_test to role src_role
+grant select on table src_autho_test to role sRc_roLE
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@src_autho_test
PREHOOK: query: select key from src_autho_test order by key limit 20
@@ -363,26 +363,26 @@ POSTHOOK: Input: default@src_autho_test
118
118
119
-PREHOOK: query: show grant role src_role on table src_autho_test
+PREHOOK: query: show grant role sRc_roLE on table src_autho_test
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: show grant role src_role on table src_autho_test
+POSTHOOK: query: show grant role sRc_roLE on table src_autho_test
POSTHOOK: type: SHOW_GRANT
-default src_autho_test src_role ROLE SELECT false -1 hive_test_user
-PREHOOK: query: show grant role src_role on table src_autho_test(key)
+default src_autho_test sRc_roLE ROLE SELECT false -1 hive_test_user
+PREHOOK: query: show grant role sRc_roLE on table src_autho_test(key)
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: show grant role src_role on table src_autho_test(key)
+POSTHOOK: query: show grant role sRc_roLE on table src_autho_test(key)
POSTHOOK: type: SHOW_GRANT
-PREHOOK: query: revoke select on table src_autho_test from role src_role
+PREHOOK: query: revoke select on table src_autho_test from role sRc_roLE
PREHOOK: type: REVOKE_PRIVILEGE
PREHOOK: Output: default@src_autho_test
-POSTHOOK: query: revoke select on table src_autho_test from role src_role
+POSTHOOK: query: revoke select on table src_autho_test from role sRc_roLE
POSTHOOK: type: REVOKE_PRIVILEGE
POSTHOOK: Output: default@src_autho_test
PREHOOK: query: -- drop role
-drop role src_role
+drop role sRc_roLE
PREHOOK: type: DROPROLE
POSTHOOK: query: -- drop role
-drop role src_role
+drop role sRc_roLE
POSTHOOK: type: DROPROLE
PREHOOK: query: drop table src_autho_test
PREHOOK: type: DROPTABLE
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/authorization_5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/authorization_5.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/authorization_5.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/authorization_5.q.out Thu Sep 4 02:49:46 2014
@@ -28,34 +28,34 @@ POSTHOOK: query: SHOW GRANT USER hive_te
POSTHOOK: type: SHOW_GRANT
test_db hive_test_user USER DROP false -1 hive_test_user
test_db hive_test_user USER SELECT false -1 hive_test_user
-PREHOOK: query: CREATE ROLE db_test_role
+PREHOOK: query: CREATE ROLE db_TEST_Role
PREHOOK: type: CREATEROLE
-POSTHOOK: query: CREATE ROLE db_test_role
+POSTHOOK: query: CREATE ROLE db_TEST_Role
POSTHOOK: type: CREATEROLE
-PREHOOK: query: GRANT ROLE db_test_role TO USER hive_test_user
+PREHOOK: query: GRANT ROLE db_TEST_Role TO USER hive_test_user
PREHOOK: type: GRANT_ROLE
-POSTHOOK: query: GRANT ROLE db_test_role TO USER hive_test_user
+POSTHOOK: query: GRANT ROLE db_TEST_Role TO USER hive_test_user
POSTHOOK: type: GRANT_ROLE
PREHOOK: query: SHOW ROLE GRANT USER hive_test_user
PREHOOK: type: SHOW_ROLE_GRANT
POSTHOOK: query: SHOW ROLE GRANT USER hive_test_user
POSTHOOK: type: SHOW_ROLE_GRANT
-db_test_role false -1 hive_test_user
+db_TEST_Role false -1 hive_test_user
public false -1
-PREHOOK: query: GRANT drop ON DATABASE test_db TO ROLE db_test_role
+PREHOOK: query: GRANT drop ON DATABASE test_db TO ROLE db_TEST_Role
PREHOOK: type: GRANT_PRIVILEGE
-POSTHOOK: query: GRANT drop ON DATABASE test_db TO ROLE db_test_role
+POSTHOOK: query: GRANT drop ON DATABASE test_db TO ROLE db_TEST_Role
POSTHOOK: type: GRANT_PRIVILEGE
-PREHOOK: query: GRANT select ON DATABASE test_db TO ROLE db_test_role
+PREHOOK: query: GRANT select ON DATABASE test_db TO ROLE db_TEST_Role
PREHOOK: type: GRANT_PRIVILEGE
-POSTHOOK: query: GRANT select ON DATABASE test_db TO ROLE db_test_role
+POSTHOOK: query: GRANT select ON DATABASE test_db TO ROLE db_TEST_Role
POSTHOOK: type: GRANT_PRIVILEGE
-PREHOOK: query: SHOW GRANT ROLE db_test_role ON DATABASE test_db
+PREHOOK: query: SHOW GRANT ROLE db_TEST_Role ON DATABASE test_db
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: SHOW GRANT ROLE db_test_role ON DATABASE test_db
+POSTHOOK: query: SHOW GRANT ROLE db_TEST_Role ON DATABASE test_db
POSTHOOK: type: SHOW_GRANT
-test_db db_test_role ROLE DROP false -1 hive_test_user
-test_db db_test_role ROLE SELECT false -1 hive_test_user
+test_db db_TEST_Role ROLE DROP false -1 hive_test_user
+test_db db_TEST_Role ROLE SELECT false -1 hive_test_user
PREHOOK: query: DROP DATABASE IF EXISTS test_db
PREHOOK: type: DROPDATABASE
PREHOOK: Input: database:test_db
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out Thu Sep 4 02:49:46 2014
@@ -12,12 +12,12 @@ POSTHOOK: Output: database:default
POSTHOOK: Output: default@t_gpr1
PREHOOK: query: -- all privileges should have been set for user
-GRANT ALL ON t_gpr1 TO ROLE public
+GRANT ALL ON t_gpr1 TO ROLE pubLic
PREHOOK: type: GRANT_PRIVILEGE
PREHOOK: Output: default@t_gpr1
POSTHOOK: query: -- all privileges should have been set for user
-GRANT ALL ON t_gpr1 TO ROLE public
+GRANT ALL ON t_gpr1 TO ROLE pubLic
POSTHOOK: type: GRANT_PRIVILEGE
POSTHOOK: Output: default@t_gpr1
PREHOOK: query: SHOW GRANT USER user1 ON TABLE t_gpr1
@@ -28,9 +28,9 @@ default t_gpr1 user1 USER DELETE true
default t_gpr1 user1 USER INSERT true -1 user1
default t_gpr1 user1 USER SELECT true -1 user1
default t_gpr1 user1 USER UPDATE true -1 user1
-PREHOOK: query: SHOW GRANT ROLE public ON TABLE t_gpr1
+PREHOOK: query: SHOW GRANT ROLE pubLic ON TABLE t_gpr1
PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: SHOW GRANT ROLE public ON TABLE t_gpr1
+POSTHOOK: query: SHOW GRANT ROLE pubLic ON TABLE t_gpr1
POSTHOOK: type: SHOW_GRANT
default t_gpr1 public ROLE DELETE false -1 user1
default t_gpr1 public ROLE INSERT false -1 user1
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/authorization_role_grant2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/authorization_role_grant2.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/authorization_role_grant2.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/authorization_role_grant2.q.out Thu Sep 4 02:49:46 2014
@@ -60,9 +60,9 @@ PREHOOK: type: GRANT_ROLE
POSTHOOK: query: -- grant role to another role
grant src_role_wadmin to role sRc_role2
POSTHOOK: type: GRANT_ROLE
-PREHOOK: query: set role ADMIN
+PREHOOK: query: set role ADMIn
PREHOOK: type: SHOW_ROLES
-POSTHOOK: query: set role ADMIN
+POSTHOOK: query: set role ADMIn
POSTHOOK: type: SHOW_ROLES
PREHOOK: query: grant src_role2 to user user3
PREHOOK: type: GRANT_ROLE
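
The positive-test updates from authorization_1.q.out through authorization_role_grant2.q.out make the same point from the other direction: creating, granting, showing, and revoking with mixed-case names (sRc_roLE, db_TEST_Role, pubLic, ADMIn) behaves exactly as the all-lowercase runs did. Note in authorization_grant_public_role.q.out above that SHOW GRANT ROLE pubLic still lists the role as "public", so lookups are case-insensitive while the output prints the name as stored.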
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/show_conf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/show_conf.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/show_conf.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/show_conf.q.out Thu Sep 4 02:49:46 2014
@@ -7,4 +7,4 @@ PREHOOK: query: show conf "hive.stats.re
PREHOOK: type: SHOWCONF
POSTHOOK: query: show conf "hive.stats.retries.wait"
POSTHOOK: type: SHOWCONF
-3000 INT The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failures baseWindow * (failure 1) * (random number between [0.0,1.0]).
+3000ms STRING(TIME) Expects a time value with unit (d/day, h/hour, m/min, s/sec, ms/msec, us/usec, ns/nsec), which is msec if not specified. The base waiting window before the next retry. The actual wait time is calculated by baseWindow * failures baseWindow * (failure + 1) * (random number between [0.0,1.0]).
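
The updated golden output reflects hive.stats.retries.wait becoming a time-typed property: the default now prints with its unit ("3000ms"), the type is STRING(TIME), and values may carry any of the listed unit suffixes, defaulting to milliseconds. A minimal sketch of reading such a property; the getTimeVar call matches the one used in the HiveServer hunk at the end of this diff, but the ConfVars constant name here is assumed:

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class StatsRetryWaitExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // "5s", "5000ms", and a bare "5000" (msec assumed) should all resolve to 5000.
        conf.set("hive.stats.retries.wait", "5s");
        long waitMs = HiveConf.getTimeVar(
            conf, HiveConf.ConfVars.HIVE_STATS_RETRIES_WAIT, TimeUnit.MILLISECONDS);
        System.out.println(waitMs);
      }
    }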
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out Thu Sep 4 02:49:46 2014
@@ -14,6 +14,7 @@ PREHOOK: query: create table over1k(
fields terminated by '|'
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k
POSTHOOK: query: create table over1k(
t tinyint,
si smallint,
@@ -42,6 +43,7 @@ POSTHOOK: Output: default@over1k
PREHOOK: query: create table over1k_orc like over1k
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_orc
POSTHOOK: query: create table over1k_orc like over1k
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
@@ -81,6 +83,7 @@ PREHOOK: query: create table over1k_part
partitioned by (ds string, t tinyint) stored as orc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_orc
POSTHOOK: query: create table over1k_part_orc(
si smallint,
i int,
@@ -93,6 +96,7 @@ POSTHOOK: Output: default@over1k_part_or
PREHOOK: query: create table over1k_part_limit_orc like over1k_part_orc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_limit_orc
POSTHOOK: query: create table over1k_part_limit_orc like over1k_part_orc
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
@@ -114,6 +118,7 @@ PREHOOK: query: create table over1k_part
clustered by (si) into 4 buckets stored as orc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_orc
POSTHOOK: query: create table over1k_part_buck_orc(
si smallint,
i int,
@@ -134,6 +139,7 @@ PREHOOK: query: create table over1k_part
sorted by (f) into 4 buckets stored as orc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_sort_orc
POSTHOOK: query: create table over1k_part_buck_sort_orc(
si smallint,
i int,
@@ -195,6 +201,7 @@ STAGE PLANS:
Map-reduce partition columns: _col4 (type: tinyint)
Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+ Execution mode: vectorized
Reducer 3
Reduce Operator Tree:
Extract
@@ -280,6 +287,7 @@ STAGE PLANS:
Map-reduce partition columns: _col4 (type: tinyint)
Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+ Execution mode: vectorized
Reducer 3
Reduce Operator Tree:
Extract
@@ -564,6 +572,7 @@ STAGE PLANS:
Map-reduce partition columns: _col4 (type: tinyint)
Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+ Execution mode: vectorized
Reducer 3
Reduce Operator Tree:
Extract
@@ -649,6 +658,7 @@ STAGE PLANS:
Map-reduce partition columns: _col4 (type: tinyint)
Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+ Execution mode: vectorized
Reducer 3
Reduce Operator Tree:
Extract
@@ -1292,6 +1302,7 @@ create table over1k_part2_orc(
partitioned by (ds string, t tinyint)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part2_orc
POSTHOOK: query: -- tests for HIVE-6883
create table over1k_part2_orc(
si smallint,
@@ -1419,6 +1430,7 @@ STAGE PLANS:
Map-reduce partition columns: _col4 (type: tinyint)
Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+ Execution mode: vectorized
Reducer 3
Reduce Operator Tree:
Extract
@@ -1762,6 +1774,7 @@ create table over1k_part_buck_sort2_orc(
sorted by (f) into 1 buckets
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_sort2_orc
POSTHOOK: query: -- hadoop-1 does not honor number of reducers in local mode. There is always only 1 reducer irrespective of the number of buckets.
-- Hence all records go to one bucket and all other buckets will be empty. Similar to HIVE-6867. However, hadoop-2 honors number
-- of reducers and records are spread across all reducers. To avoid this inconsistency we will make number of buckets to 1 for this test.
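
Two mechanical updates run through the whole dynpart_sort_opt_vectorization.q.out file: CREATE TABLE statements now record the table being created as a PREHOOK output entity (the added "PREHOOK: Output: default@..." lines), and the Tez explain plans now report "Execution mode: vectorized" on the affected vertices.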
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Thu Sep 4 02:49:46 2014
@@ -69,9 +69,7 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.StringUtils;
@@ -108,7 +106,7 @@ public final class ObjectInspectorUtils
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
if (!(poi instanceof AbstractPrimitiveWritableObjectInspector)) {
return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
- (PrimitiveTypeInfo)poi.getTypeInfo());
+ poi.getTypeInfo());
}
}
return oi;
@@ -292,24 +290,21 @@ public final class ObjectInspectorUtils
switch (oi.getCategory()) {
case PRIMITIVE: {
PrimitiveObjectInspector loi = (PrimitiveObjectInspector) oi;
- switch (objectInspectorOption) {
- case DEFAULT: {
- if (loi.preferWritable()) {
- result = loi.getPrimitiveWritableObject(loi.copyObject(o));
- } else {
- result = loi.getPrimitiveJavaObject(o);
- }
- break;
+ if (objectInspectorOption == ObjectInspectorCopyOption.DEFAULT) {
+ objectInspectorOption = loi.preferWritable() ?
+ ObjectInspectorCopyOption.WRITABLE : ObjectInspectorCopyOption.JAVA;
}
- case JAVA: {
+ switch (objectInspectorOption) {
+ case JAVA:
result = loi.getPrimitiveJavaObject(o);
+ if (loi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
+ result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
+ }
break;
- }
- case WRITABLE: {
+ case WRITABLE:
result = loi.getPrimitiveWritableObject(loi.copyObject(o));
break;
}
- }
break;
}
case LIST: {
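
The copyToStandardObject rewrite above folds the DEFAULT copy option into the two concrete ones up front (preferWritable() picks WRITABLE or JAVA), replacing the old nested switch, and adds a defensive copy when a JAVA-mode timestamp is returned. The copy matters because java.sql.Timestamp is mutable; returning the stored instance would let callers alias internal state. A self-contained demonstration of that hazard:

    import java.sql.Timestamp;

    // What returning a mutable value without copying amounts to: the caller
    // can silently rewrite state it does not own.
    public class TimestampAliasingDemo {
      public static void main(String[] args) {
        Timestamp stored = Timestamp.valueOf("2014-09-04 02:49:46");
        Timestamp leaked = stored;   // no copy made
        leaked.setTime(0L);          // caller "edits its copy"...
        System.out.println(stored);  // ...and the stored value changed too
      }
    }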
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java Thu Sep 4 02:49:46 2014
@@ -32,17 +32,6 @@ public class JavaBinaryObjectInspector e
}
@Override
- public byte[] copyObject(Object o) {
- if (null == o){
- return null;
- }
- byte[] incoming = (byte[])o;
- byte[] outgoing = new byte[incoming.length];
- System.arraycopy(incoming, 0, outgoing, 0, incoming.length);
- return outgoing;
- }
-
- @Override
public BytesWritable getPrimitiveWritableObject(Object o) {
return o == null ? null : new BytesWritable((byte[])o);
}
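
The override deleted from JavaBinaryObjectInspector performed a manual System.arraycopy clone of the incoming byte[]. For reference, the same deep copy is a one-liner; whether the superclass now supplies equivalent behavior is outside this hunk:

    import java.util.Arrays;

    public class ByteCopyDemo {
      static byte[] copy(byte[] incoming) {
        // Equivalent to the removed new byte[] + System.arraycopy pair;
        // incoming.clone() would also work.
        return incoming == null ? null : Arrays.copyOf(incoming, incoming.length);
      }

      public static void main(String[] args) {
        byte[] a = {1, 2, 3};
        byte[] b = copy(a);
        b[0] = 9;
        System.out.println(a[0]); // still 1: b is a deep copy
      }
    }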
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java Thu Sep 4 02:49:46 2014
@@ -64,15 +64,16 @@ public class JavaHiveCharObjectInspector
}
public Object set(Object o, HiveChar value) {
- HiveChar setValue = (HiveChar) o;
- setValue.setValue(value, getMaxLength());
- return setValue;
+ if (BaseCharUtils.doesPrimitiveMatchTypeParams(value,
+ (CharTypeInfo) typeInfo)) {
+ return value;
+ } else {
+ return new HiveChar(value, getMaxLength());
+ }
}
public Object set(Object o, String value) {
- HiveChar setValue = (HiveChar) o;
- setValue.setValue(value, getMaxLength());
- return setValue;
+ return new HiveChar(value, getMaxLength());
}
public Object create(HiveChar value) {
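
Both set() overloads in JavaHiveCharObjectInspector previously cast the first argument to HiveChar and mutated it in place, so a null (or non-HiveChar) target would throw. The new versions ignore the target and either return the supplied value as-is, when doesPrimitiveMatchTypeParams says it already fits the char type's length parameter, or build a fresh HiveChar fitted to getMaxLength(). A usage sketch; the public constructor taking a CharTypeInfo is assumed here:

    import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector;
    import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;

    public class CharSetDemo {
      public static void main(String[] args) {
        JavaHiveCharObjectInspector oi =
            new JavaHiveCharObjectInspector(new CharTypeInfo(5)); // char(5)
        // The old code would have thrown here while casting the null target.
        Object fitted = oi.set(null, "abcdefgh");
        System.out.println(fitted); // value fitted to the char(5) length
      }
    }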
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java Thu Sep 4 02:49:46 2014
@@ -19,11 +19,11 @@ package org.apache.hadoop.hive.serde2.ob
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
public class JavaHiveVarcharObjectInspector extends AbstractPrimitiveJavaObjectInspector
-implements SettableHiveVarcharObjectInspector {
+ implements SettableHiveVarcharObjectInspector {
// no-arg ctor required for Kyro serialization
public JavaHiveVarcharObjectInspector() {
@@ -38,9 +38,8 @@ implements SettableHiveVarcharObjectInsp
if (o == null) {
return null;
}
- HiveVarchar value = (HiveVarchar)o;
- if (BaseCharUtils.doesPrimitiveMatchTypeParams(
- value, (VarcharTypeInfo)typeInfo)) {
+ HiveVarchar value = (HiveVarchar) o;
+ if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
return value;
}
// value needs to be converted to match the type params (length, etc).
@@ -52,40 +51,27 @@ implements SettableHiveVarcharObjectInsp
if (o == null) {
return null;
}
- return getWritableWithParams((HiveVarchar)o);
- }
-
- private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
- HiveVarchar hv = new HiveVarchar(val, getMaxLength());
- return hv;
- }
-
- private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
- HiveVarcharWritable newValue = new HiveVarcharWritable();
- newValue.set(val, getMaxLength());
- return newValue;
+ return getWritableWithParams((HiveVarchar) o);
}
@Override
public Object set(Object o, HiveVarchar value) {
- if (BaseCharUtils.doesPrimitiveMatchTypeParams(
- value, (VarcharTypeInfo)typeInfo)) {
- return o = value;
+ if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
+ return value;
} else {
// Otherwise value may be too long, convert to appropriate value based on params
- return o = new HiveVarchar(value, getMaxLength());
+ return new HiveVarchar(value, getMaxLength());
}
}
@Override
public Object set(Object o, String value) {
- return o = new HiveVarchar(value, getMaxLength());
+ return new HiveVarchar(value, getMaxLength());
}
@Override
public Object create(HiveVarchar value) {
- HiveVarchar hc = new HiveVarchar(value, getMaxLength());
- return hc;
+ return new HiveVarchar(value, getMaxLength());
}
public int getMaxLength() {
@@ -93,4 +79,14 @@ implements SettableHiveVarcharObjectInsp
return ti.getLength();
}
+ private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
+ return new HiveVarchar(val, getMaxLength());
+ }
+
+ private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
+ HiveVarcharWritable newValue = new HiveVarcharWritable();
+ newValue.set(val, getMaxLength());
+ return newValue;
+ }
+
}
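
Besides moving the private helpers below the public methods, the varchar cleanup drops the "return o = value;" idiom. Assigning to a parameter only rebinds the method's local slot; the caller's variable is untouched, so the assignment was dead code, and returning the value directly says the same thing without hinting at an in-place update:

    public class ParamAssignDemo {
      static Object set(Object o, Object value) {
        return o = value; // rebinds the parameter; invisible to the caller
      }

      public static void main(String[] args) {
        Object target = "before";
        set(target, "after");
        System.out.println(target); // still "before"
      }
    }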
Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java Thu Sep 4 02:49:46 2014
@@ -39,6 +39,17 @@ public class JavaTimestampObjectInspecto
return o == null ? null : (Timestamp) o;
}
+ @Override
+ public Object copyObject(Object o) {
+ if (o == null) {
+ return null;
+ }
+ Timestamp source = (Timestamp) o;
+ Timestamp copy = new Timestamp(source.getTime());
+ copy.setNanos(source.getNanos());
+ return copy;
+ }
+
public Timestamp get(Object o) {
return (Timestamp) o;
}
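
The new copyObject is careful about precision: new Timestamp(source.getTime()) alone preserves only whole milliseconds, because the long constructor derives the nanos field from the millisecond value. The explicit setNanos(source.getNanos()) is what carries the sub-millisecond digits across. A quick check:

    import java.sql.Timestamp;

    public class TimestampCopyDemo {
      public static void main(String[] args) {
        Timestamp source = new Timestamp(1409798986000L);
        source.setNanos(123456789);

        Timestamp lossy = new Timestamp(source.getTime()); // millis only
        Timestamp full = new Timestamp(source.getTime());
        full.setNanos(source.getNanos());                  // as in the new copyObject

        System.out.println(lossy.getNanos()); // 123000000: sub-millisecond digits gone
        System.out.println(full.getNanos());  // 123456789
      }
    }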
Modified: hive/branches/cbo/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=1622396&r1=1622395&r2=1622396&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Thu Sep 4 02:49:46 2014
@@ -30,6 +30,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
+import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log;
@@ -62,8 +63,6 @@ import org.apache.thrift.transport.TServ
import org.apache.thrift.transport.TServerTransport;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import com.facebook.fb303.fb_status;
/**
@@ -670,8 +669,11 @@ public class HiveServer extends ThriftHi
boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE);
+ int timeout = (int) HiveConf.getTimeVar(
+ conf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
- TServerTransport serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, 1000 * conf.getIntVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT));
+ TServerTransport serverTransport =
+ tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, timeout);
// set all properties specified on the command line
for (Map.Entry<Object, Object> item : hiveconf.entrySet()) {
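
This HiveServer change pairs with the time-typed configuration seen in show_conf.q.out earlier: rather than multiplying a bare int by 1000 and assuming the operator meant seconds, getTimeVar lets the configured value state its own unit and converts it to the milliseconds the TServerSocket constructor expects.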