Posted to commits@hive.apache.org by mg...@apache.org on 2019/11/13 07:59:49 UTC
[hive] branch master updated: HIVE-20256 Remove unused classes from Hive QL (Miklos Gergely, reviewed by David Mollitor)
This is an automated email from the ASF dual-hosted git repository.
mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 90fa906 HIVE-20256 Remove unused classes from Hive QL (Miklos Gergely, reviewed by David Mollitor)
90fa906 is described below
commit 90fa9064f2c6907fbe6237cb46d5937eebd8ea31
Author: miklosgergely <mi...@gmail.com>
AuthorDate: Fri Mar 15 14:50:19 2019 +0100
HIVE-20256 Remove unused classes from Hive QL (Miklos Gergely, reviewed by David Mollitor)
---
.../apache/hadoop/hive/ql/exec/NumericUDAF.java | 33 --
.../hive/ql/exec/vector/AggregateDefinition.java | 52 --
.../ql/exec/vector/VectorColumnAssignFactory.java | 608 ---------------------
.../fast/VectorMapJoinFastBytesHashUtil.java | 37 --
.../mapjoin/fast/VectorMapJoinFastHashMap.java | 40 --
.../hadoop/hive/ql/metadata/RandomDimension.java | 41 --
.../hive/ql/optimizer/ppr/ExprPrunerInfo.java | 41 --
.../hadoop/hive/ql/parse/InputSignature.java | 119 ----
.../hadoop/hive/ql/parse/PrintOpTreeProcessor.java | 95 ----
.../org/apache/hadoop/hive/ql/parse/TezWalker.java | 66 ---
.../repl/dump/io/VersionCompatibleSerializer.java | 37 --
.../apache/hadoop/hive/ql/plan/ExplosionDesc.java | 58 --
.../org/apache/hadoop/hive/ql/plan/SchemaDesc.java | 46 --
13 files changed, 1273 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAF.java
deleted file mode 100644
index 0e96d07..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAF.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-/**
- * Base class of numeric UDAFs like sum and avg which need a
- * NumericUDAFEvaluatorResolver.
- */
-public class NumericUDAF extends UDAF {
-
- /**
- * Constructor.
- */
- public NumericUDAF() {
- setResolver(new NumericUDAFEvaluatorResolver(this.getClass()));
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/AggregateDefinition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/AggregateDefinition.java
deleted file mode 100644
index 3772979..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/AggregateDefinition.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
-import org.apache.hadoop.hive.ql.plan.GroupByDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-
-class AggregateDefinition {
-
- private String name;
- private VectorExpressionDescriptor.ArgumentType type;
- private GenericUDAFEvaluator.Mode udafEvaluatorMode;
- private Class<? extends VectorAggregateExpression> aggClass;
-
- AggregateDefinition(String name, VectorExpressionDescriptor.ArgumentType type,
- GenericUDAFEvaluator.Mode udafEvaluatorMode, Class<? extends VectorAggregateExpression> aggClass) {
- this.name = name;
- this.type = type;
- this.udafEvaluatorMode = udafEvaluatorMode;
- this.aggClass = aggClass;
- }
-
- String getName() {
- return name;
- }
- VectorExpressionDescriptor.ArgumentType getType() {
- return type;
- }
- GenericUDAFEvaluator.Mode getUdafEvaluatorMode() {
- return udafEvaluatorMode;
- }
- Class<? extends VectorAggregateExpression> getAggClass() {
- return aggClass;
- }
-}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
deleted file mode 100644
index 39a124f..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ /dev/null
@@ -1,608 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-
-/**
- * This class is used as a static factory for VectorColumnAssign.
- * Is capable of building assigners from expression nodes or from object inspectors.
- */
-public class VectorColumnAssignFactory {
-
- private static abstract class VectorColumnAssignVectorBase<T extends ColumnVector>
- implements VectorColumnAssign {
- protected VectorizedRowBatch outBatch;
- protected T outCol;
-
- protected void copyValue(T in, int srcIndex, int destIndex) throws HiveException {
- throw new HiveException("Internal error: should not reach here");
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public void assignVectorValue(VectorizedRowBatch inBatch, int batchIndex,
- int valueColumnIndex, int destIndex) throws HiveException {
- T in = (T) inBatch.cols[valueColumnIndex];
- if (in.isRepeating) {
- if (in.noNulls) {
- copyValue(in, 0, destIndex);
- }
- else {
- assignNull(destIndex);
- }
- }
- else {
- int srcIndex = inBatch.selectedInUse ? inBatch.selected[batchIndex] : batchIndex;
- if (in.noNulls || !in.isNull[srcIndex]) {
- copyValue(in, srcIndex, destIndex);
- }
- else {
- assignNull(destIndex);
- }
- }
- }
-
- public VectorColumnAssign init(VectorizedRowBatch out, T cv) {
- this.outBatch = out;
- this.outCol = cv;
- return this;
- }
-
- protected void assignNull(int index) {
- VectorizedBatchUtil.setNullColIsNullValue(outCol, index);
- }
-
- @Override
- public void reset() {
- }
-
- @Override
- public void assignObjectValue(Object value, int destIndex) throws HiveException {
- throw new HiveException("Internal error: should not reach here");
- }
- }
-
- private static abstract class VectorLongColumnAssign
- extends VectorColumnAssignVectorBase<LongColumnVector> {
- protected void assignLong(long value, int destIndex) {
- outCol.vector[destIndex] = value;
- }
- }
-
- private static abstract class VectorDoubleColumnAssign
- extends VectorColumnAssignVectorBase<DoubleColumnVector> {
-
- protected void assignDouble(double value, int destIndex) {
- outCol.vector[destIndex] = value;
- }
- }
-
- private static abstract class VectorBytesColumnAssign
- extends VectorColumnAssignVectorBase<BytesColumnVector> {
- byte[] pad = new byte[BytesColumnVector.DEFAULT_BUFFER_SIZE];
- int padUsed = 0;
-
- protected void assignBytes(byte[] buffer, int start, int length, int destIndex) {
- if (padUsed + length <= pad.length) {
- System.arraycopy(buffer, start,
- pad, padUsed, length);
- outCol.vector[destIndex] = pad;
- outCol.start[destIndex] = padUsed;
- outCol.length[destIndex] = length;
- padUsed += length;
- }
- else {
- outCol.vector[destIndex] = Arrays.copyOfRange(buffer,
- start, length);
- outCol.start[destIndex] = 0;
- outCol.length[destIndex] = length;
- }
- }
-
- @Override
- public void reset() {
- super.reset();
- padUsed = 0;
- }
- }
-
- private static abstract class VectorDecimalColumnAssign
- extends VectorColumnAssignVectorBase<DecimalColumnVector> {
-
- protected void assignDecimal(HiveDecimal value, int index) {
- outCol.set(index, value);
- }
- protected void assignDecimal(HiveDecimalWritable hdw, int index) {
- outCol.set(index, hdw);
- }
- }
-
- private static abstract class VectorTimestampColumnAssign
- extends VectorColumnAssignVectorBase<TimestampColumnVector> {
-
- protected void assignTimestamp(Timestamp value, int index) {
- outCol.set(index, value.toSqlTimestamp());
- }
- protected void assignTimestamp(TimestampWritableV2 tw, int index) {
- outCol.set(index, tw.getTimestamp().toSqlTimestamp());
- }
- }
-
- private static abstract class VectorIntervalDayTimeColumnAssign
- extends VectorColumnAssignVectorBase<IntervalDayTimeColumnVector> {
-
- protected void assignIntervalDayTime(HiveIntervalDayTime value, int index) {
- outCol.set(index, value);
- }
- protected void assignIntervalDayTime(HiveIntervalDayTimeWritable tw, int index) {
- outCol.set(index, tw.getHiveIntervalDayTime());
- }
- }
-
- public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch)
- throws HiveException {
- VectorColumnAssign[] vca = new VectorColumnAssign[outputBatch.cols.length];
- for(int i=0; i<vca.length; ++i) {
- ColumnVector cv = outputBatch.cols[i];
- if (cv == null) {
- continue;
- }
- else if (cv instanceof LongColumnVector) {
- vca[i] = new VectorLongColumnAssign() {
- @Override
- protected void copyValue(LongColumnVector in, int srcIndex, int destIndex) {
- assignLong(in.vector[srcIndex], destIndex);
- }
- }.init(outputBatch, (LongColumnVector) cv);
- }
- else if (cv instanceof DoubleColumnVector) {
- vca[i] = new VectorDoubleColumnAssign() {
-
- @Override
- protected void copyValue(DoubleColumnVector in, int srcIndex, int destIndex) {
- assignDouble(in.vector[srcIndex], destIndex);
- }
- }.init(outputBatch, (DoubleColumnVector) cv);
- }
- else if (cv instanceof BytesColumnVector) {
- vca[i] = new VectorBytesColumnAssign() {
- @Override
- protected void copyValue(BytesColumnVector src, int srcIndex, int destIndex) {
- assignBytes(src.vector[srcIndex], src.start[srcIndex], src.length[srcIndex], destIndex);
- }
- }.init(outputBatch, (BytesColumnVector) cv);
- }
- else if (cv instanceof DecimalColumnVector) {
- vca[i] = new VectorDecimalColumnAssign() {
- @Override
- protected void copyValue(DecimalColumnVector src, int srcIndex, int destIndex) {
- assignDecimal(src.vector[srcIndex], destIndex);
- }
- };
- }
- else {
- throw new HiveException("Unimplemented vector column type: " + cv.getClass().getName());
- }
- }
- return vca;
- }
-
- public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch,
- int outColIndex, ObjectInspector objInspector) throws HiveException {
- PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
- return buildObjectAssign(outputBatch, outColIndex, poi.getPrimitiveCategory());
- }
-
- public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch,
- int outColIndex, PrimitiveCategory category) throws HiveException {
- VectorColumnAssign outVCA = null;
- ColumnVector destCol = outputBatch.cols[outColIndex];
- if (destCol == null) {
- switch(category) {
- case VOID:
- outVCA = new VectorLongColumnAssign() {
- // This is a dummy assigner
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- // This is no-op, there is no column to assign to and val is expected to be null
- assert (val == null);
- }
- };
- break;
- default:
- throw new HiveException("Incompatible (null) vector column and primitive category " +
- category);
- }
- } else if (destCol instanceof LongColumnVector) {
- switch(category) {
- case BOOLEAN:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- BooleanWritable bw = (BooleanWritable) val;
- assignLong(bw.get() ? 1:0, destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case BYTE:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- ByteWritable bw = (ByteWritable) val;
- assignLong(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case SHORT:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- ShortWritable bw = (ShortWritable) val;
- assignLong(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case INT:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- IntWritable bw = (IntWritable) val;
- assignLong(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case LONG:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- LongWritable bw = (LongWritable) val;
- assignLong(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case TIMESTAMP:
- outVCA = new VectorTimestampColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- assignTimestamp((TimestampWritableV2) val, destIndex);
- }
- }
- }.init(outputBatch, (TimestampColumnVector) destCol);
- break;
- case DATE:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- DateWritableV2 bw = (DateWritableV2) val;
- assignLong(bw.getDays(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case INTERVAL_YEAR_MONTH:
- outVCA = new VectorLongColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val;
- assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex);
- }
- }
- }.init(outputBatch, (LongColumnVector) destCol);
- break;
- case INTERVAL_DAY_TIME:outVCA = new VectorIntervalDayTimeColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
- assignIntervalDayTime(
- bw.getHiveIntervalDayTime(),
- destIndex);
- }
- }
- }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
- break;
- default:
- throw new HiveException("Incompatible Long vector column and primitive category " +
- category);
- }
- }
- else if (destCol instanceof DoubleColumnVector) {
- switch(category) {
- case DOUBLE:
- outVCA = new VectorDoubleColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- DoubleWritable bw = (DoubleWritable) val;
- assignDouble(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (DoubleColumnVector) destCol);
- break;
- case FLOAT:
- outVCA = new VectorDoubleColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- FloatWritable bw = (FloatWritable) val;
- assignDouble(bw.get(), destIndex);
- }
- }
- }.init(outputBatch, (DoubleColumnVector) destCol);
- break;
- default:
- throw new HiveException("Incompatible Double vector column and primitive category " +
- category);
- }
- }
- else if (destCol instanceof BytesColumnVector) {
- switch(category) {
- case BINARY:
- outVCA = new VectorBytesColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- BytesWritable bw = (BytesWritable) val;
- byte[] bytes = bw.getBytes();
- assignBytes(bytes, 0, bw.getLength(), destIndex);
- }
- }
- }.init(outputBatch, (BytesColumnVector) destCol);
- break;
- case STRING:
- outVCA = new VectorBytesColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- } else {
- Text bw = (Text) val;
- byte[] bytes = bw.getBytes();
- assignBytes(bytes, 0, bw.getLength(), destIndex);
- }
- }
- }.init(outputBatch, (BytesColumnVector) destCol);
- break;
- case VARCHAR:
- outVCA = new VectorBytesColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- } else {
- // We store VARCHAR type stripped of pads.
- HiveVarchar hiveVarchar;
- if (val instanceof HiveVarchar) {
- hiveVarchar = (HiveVarchar) val;
- } else {
- hiveVarchar = ((HiveVarcharWritable) val).getHiveVarchar();
- }
- byte[] bytes = hiveVarchar.getValue().getBytes();
- assignBytes(bytes, 0, bytes.length, destIndex);
- }
- }
- }.init(outputBatch, (BytesColumnVector) destCol);
- break;
- case CHAR:
- outVCA = new VectorBytesColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- } else {
- // We store CHAR type stripped of pads.
- HiveChar hiveChar;
- if (val instanceof HiveChar) {
- hiveChar = (HiveChar) val;
- } else {
- hiveChar = ((HiveCharWritable) val).getHiveChar();
- }
- byte[] bytes = hiveChar.getStrippedValue().getBytes();
- assignBytes(bytes, 0, bytes.length, destIndex);
- }
- }
- }.init(outputBatch, (BytesColumnVector) destCol);
- break;
- default:
- throw new HiveException("Incompatible Bytes vector column and primitive category " +
- category);
- }
- }
- else if (destCol instanceof DecimalColumnVector) {
- switch(category) {
- case DECIMAL:
- outVCA = new VectorDecimalColumnAssign() {
- @Override
- public void assignObjectValue(Object val, int destIndex) throws HiveException {
- if (val == null) {
- assignNull(destIndex);
- }
- else {
- if (val instanceof HiveDecimal) {
- assignDecimal((HiveDecimal) val, destIndex);
- } else {
- assignDecimal((HiveDecimalWritable) val, destIndex);
- }
-
- }
- }
- }.init(outputBatch, (DecimalColumnVector) destCol);
- break;
- default:
- throw new HiveException("Incompatible Decimal vector column and primitive category " +
- category);
- }
- }
- else {
- throw new HiveException("Unknown vector column type " + destCol.getClass().getName());
- }
- return outVCA;
- }
-
- /**
- * Builds the assigners from an object inspector and from a list of columns.
- * @param outputBatch The batch to which the assigners are bound
- * @param outputOI The row object inspector
- * @param columnMap Vector column map
- * @param outputColumnNames Column names, used both to find the vector columns and the
- * @return
- * @throws HiveException
- */
- public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch,
- ObjectInspector outputOI,
- Map<String, Integer> columnMap,
- List<String> outputColumnNames) throws HiveException {
- StructObjectInspector soi = (StructObjectInspector) outputOI;
- VectorColumnAssign[] vcas = new VectorColumnAssign[outputColumnNames.size()];
- for (int i=0; i<outputColumnNames.size(); ++i) {
- String columnName = outputColumnNames.get(i);
- Integer columnIndex = columnMap.get(columnName);
- StructField columnRef = soi.getStructFieldRef(columnName);
- ObjectInspector valueOI = columnRef.getFieldObjectInspector();
- vcas[i] = buildObjectAssign(outputBatch, columnIndex, valueOI);
- }
- return vcas;
- }
-
- public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch,
- Writable[] writables) throws HiveException {
- VectorColumnAssign[] vcas = new VectorColumnAssign[outputBatch.numCols];
- for (int i = 0; i < writables.length; ++i) {
- if (writables[i] == null) {
- assert(outputBatch.cols[i] == null);
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.VOID);
- } else if (writables[i] instanceof ByteWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BYTE);
- } else if (writables[i] instanceof ShortWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.SHORT);
- } else if (writables[i] instanceof IntWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INT);
- } else if (writables[i] instanceof LongWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.LONG);
- } else if (writables[i] instanceof FloatWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.FLOAT);
- } else if (writables[i] instanceof DoubleWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.DOUBLE);
- } else if (writables[i] instanceof Text) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.STRING);
- } else if (writables[i] instanceof BytesWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BINARY);
- } else if (writables[i] instanceof TimestampWritableV2) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.TIMESTAMP);
- } else if (writables[i] instanceof HiveIntervalYearMonthWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_YEAR_MONTH);
- } else if (writables[i] instanceof HiveIntervalDayTimeWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_DAY_TIME);
- } else if (writables[i] instanceof BooleanWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BOOLEAN);
- } else if (writables[i] instanceof HiveDecimalWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.DECIMAL);
- } else if (writables[i] instanceof HiveCharWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.CHAR);
- } else if (writables[i] instanceof HiveVarcharWritable) {
- vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.VARCHAR);
- } else {
- throw new HiveException("Unimplemented vector assigner for writable type " +
- writables[i].getClass());
- }
- }
- return vcas;
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashUtil.java
deleted file mode 100644
index 7a3b3e2..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashUtil.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
-
-import org.apache.hadoop.hive.serde2.WriteBuffers;
-
-public class VectorMapJoinFastBytesHashUtil {
-
- public static String displayBytes(byte[] bytes, int start, int length) {
- StringBuilder sb = new StringBuilder();
- for (int i = start; i < start + length; i++) {
- char ch = (char) bytes[i];
- if (ch < ' ' || ch > '~') {
- sb.append(String.format("\\%03d", (int) (bytes[i] & 0xff)));
- } else {
- sb.append(ch);
- }
- }
- return sb.toString();
- }
-}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java
deleted file mode 100644
index 806e075..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
-
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMap;
-import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult;
-
-public abstract class VectorMapJoinFastHashMap
- extends VectorMapJoinFastHashTable
- implements VectorMapJoinHashMap {
-
- @Override
- public VectorMapJoinHashMapResult createHashMapResult() {
- return new VectorMapJoinFastValueStore.HashMapResult();
- }
-
- public VectorMapJoinFastHashMap(
- boolean isFullOuter,
- int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) {
- super(
- isFullOuter,
- initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount);
- }
-}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/RandomDimension.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/RandomDimension.java
deleted file mode 100644
index c8e85de..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/RandomDimension.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.metadata;
-
-import java.util.Random;
-
-/**
- * A random dimension is an abstract dimension. It is implicitly associated with
- * every row in data and has a random value
- *
- **/
-public class RandomDimension extends Dimension {
-
- Random r;
-
- public RandomDimension(Class t, String id) {
- super(t, id);
- r = new Random();
- }
-
- @Override
- public int hashCode(Object o) {
- return r.nextInt();
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java
deleted file mode 100644
index ef9e1f0..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprPrunerInfo.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.optimizer.ppr;
-
-import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-
-/**
- * The processor context for partition pruner. This contains the table alias
- * that is being currently processed.
- */
-public class ExprPrunerInfo implements NodeProcessorCtx {
-
- /**
- * The table alias that is being currently processed.
- */
- String tabAlias;
-
- public String getTabAlias() {
- return tabAlias;
- }
-
- public void setTabAlias(String tabAlias) {
- this.tabAlias = tabAlias;
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
deleted file mode 100644
index 9d1bb16..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import java.util.ArrayList;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-/**
- * The input signature of a function or operator. The signature basically
- * consists of name, list of parameter types.
- *
- **/
-
-public class InputSignature {
- private final String name;
- private final ArrayList<TypeInfo> typeArray;
-
- @SuppressWarnings("unused")
- private static final Logger LOG = LoggerFactory.getLogger(InputSignature.class
- .getName());
-
- public InputSignature(String name) {
- this.name = name;
- typeArray = new ArrayList<TypeInfo>();
- }
-
- public InputSignature(String name, TypeInfo... classList) {
- this(name);
-
- if (classList.length != 0) {
- for (TypeInfo cl : classList) {
- typeArray.add(cl);
- }
- }
- }
-
- public InputSignature(String name, Class<?>... classList) {
- this(name);
-
- if (classList.length != 0) {
- for (Class<?> cl : classList) {
- typeArray.add(TypeInfoFactory
- .getPrimitiveTypeInfoFromPrimitiveWritable(cl));
- }
- }
- }
-
- public void add(TypeInfo paramType) {
- typeArray.add(paramType);
- }
-
- public String getName() {
- return name.toUpperCase();
- }
-
- public ArrayList<TypeInfo> getTypeArray() {
- return typeArray;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj == null) {
- return false;
- }
-
- InputSignature other = null;
- try {
- other = (InputSignature) obj;
- } catch (ClassCastException cce) {
- return false;
- }
-
- return name.equalsIgnoreCase(other.getName())
- && (other.typeArray.equals(typeArray));
- }
-
- @Override
- public int hashCode() {
- return toString().hashCode();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append(getName());
- sb.append("(");
- boolean isfirst = true;
- for (TypeInfo cls : getTypeArray()) {
- if (!isfirst) {
- sb.append(",");
- }
- sb.append(cls.toString());
- isfirst = false;
- }
-
- sb.append(")");
- return sb.toString();
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
deleted file mode 100644
index 1416113..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import java.io.PrintStream;
-import java.util.HashMap;
-import java.util.Stack;
-
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.lib.NodeProcessor;
-import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.plan.OperatorDesc;
-
-/**
- * PrintOpTreeProcessor.
- *
- */
-public class PrintOpTreeProcessor implements NodeProcessor {
-
- private final PrintStream out;
- private final HashMap<Operator<? extends OperatorDesc>, Integer> opMap =
- new HashMap<Operator<? extends OperatorDesc>, Integer>();
- private Integer curNum = 0;
-
- public PrintOpTreeProcessor() {
- out = System.out;
- }
-
- public PrintOpTreeProcessor(PrintStream o) {
- out = o;
- }
-
- private String getParents(Operator<? extends OperatorDesc> op) {
- StringBuilder ret = new StringBuilder("[");
- boolean first = true;
- if (op.getParentOperators() != null) {
- for (Operator<? extends OperatorDesc> parent : op.getParentOperators()) {
- if (!first) {
- ret.append(",");
- }
- ret.append(opMap.get(parent));
- first = false;
- }
- }
- ret.append("]");
- return ret.toString();
- }
-
- private String getChildren(Operator<? extends OperatorDesc> op) {
- StringBuilder ret = new StringBuilder("[");
- boolean first = true;
- if (op.getChildOperators() != null) {
- for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
- if (!first) {
- ret.append(",");
- }
- ret.append(opMap.get(child));
- first = false;
- }
- }
- ret.append("]");
- return ret.toString();
- }
-
- public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
- Object... nodeOutputs) throws SemanticException {
- Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
- if (opMap.get(op) == null) {
- opMap.put(op, curNum++);
- }
- out.println("[" + opMap.get(op) + "] " + op.getClass().getName() + " =p=> "
- + getParents(op) + " =c=> " + getChildren(op));
- if (op.getConf() == null) {
- return null;
- }
- return null;
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java
deleted file mode 100644
index 6f9e896..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezWalker.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import java.util.List;
-
-import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
-import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
-
-/**
- * Walks the operator tree in DFS fashion.
- */
-public class TezWalker extends DefaultGraphWalker {
-
- /**
- * constructor of the walker - the dispatcher is passed.
- *
- * @param disp
- * the dispatcher to be called for each node visited
- */
- public TezWalker(Dispatcher disp) {
- super(disp);
- }
-
- /**
- * Walk the given operator.
- *
- * @param nd
- * operator being walked
- */
- @Override
- protected void walk(Node nd) throws SemanticException {
- List<? extends Node> children = nd.getChildren();
-
- // maintain the stack of operators encountered
- opStack.push(nd);
- Boolean skip = dispatchAndReturn(nd, opStack);
-
- if (skip == null || !skip) {
- // move all the children to the front of queue
- for (Node ch : children) {
- walk(ch);
- }
- }
-
- // done with this operator
- opStack.pop();
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/VersionCompatibleSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/VersionCompatibleSerializer.java
deleted file mode 100644
index 8201173..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/VersionCompatibleSerializer.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.parse.repl.dump.io;
-
-import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-
-import java.io.IOException;
-
-import static org.apache.hadoop.hive.ql.parse.EximUtil.METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION;
-
-/**
- * This is not used as of now as the conditional which lead to its usage is always false
- * hence we have removed the conditional and the usage of this class, but might be required in future.
- */
-public class VersionCompatibleSerializer implements JsonWriter.Serializer {
- @Override
- public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider)
- throws SemanticException, IOException {
- writer.jsonGenerator.writeStringField("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION);
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
deleted file mode 100644
index e3a71ca..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-
-/**
- * ExplosionDesc.
- *
- */
-@Explain(displayName = "Explosion", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class ExplosionDesc implements Serializable {
- private static final long serialVersionUID = 1L;
- private String fieldName;
- private int position;
-
- public ExplosionDesc() {
- }
-
- public ExplosionDesc(final String fieldName, final int position) {
- this.fieldName = fieldName;
- this.position = position;
- }
-
- public String getFieldName() {
- return fieldName;
- }
-
- public void setFieldName(final String fieldName) {
- this.fieldName = fieldName;
- }
-
- public int getPosition() {
- return position;
- }
-
- public void setPosition(final int position) {
- this.position = position;
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java
deleted file mode 100644
index eb19b44..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-/**
- * SchemaDesc.
- *
- */
-public class SchemaDesc implements Serializable {
- private static final long serialVersionUID = 1L;
- private String schema;
-
- public SchemaDesc() {
- }
-
- public SchemaDesc(final String schema) {
- this.schema = schema;
- }
-
- public String getSchema() {
- return schema;
- }
-
- public void setSchema(final String schema) {
- this.schema = schema;
- }
-
-}