You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2008/09/18 19:38:04 UTC
svn commit: r696736 [2/7] - in /hadoop/core/trunk: ./ src/contrib/hive/
src/contrib/hive/ql/ src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/
src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/
src/contrib/hive/ql/src/java/org/apache/ha...
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java Thu Sep 18 10:37:59 2008
@@ -22,7 +22,9 @@
import java.util.List;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+@explain(displayName="Create Table")
public class createTableDesc extends ddlDesc implements Serializable
{
private static final long serialVersionUID = 1L;
@@ -65,6 +67,7 @@
this.partCols = partCols;
}
+ @explain(displayName="name")
public String getTableName() {
return tableName;
}
@@ -77,6 +80,11 @@
return cols;
}
+ @explain(displayName="columns")
+ public List<String> getColsString() {
+ return Utilities.getFieldSchemaString(getCols());
+ }
+
public void setCols(List<FieldSchema> cols) {
this.cols = cols;
}
@@ -85,10 +93,16 @@
return partCols;
}
+ @explain(displayName="partition columns")
+ public List<String> getPartColsString() {
+ return Utilities.getFieldSchemaString(getPartCols());
+ }
+
public void setPartCols(List<FieldSchema> partCols) {
this.partCols = partCols;
}
+ @explain(displayName="bucket columns")
public List<String> getBucketCols() {
return bucketCols;
}
@@ -97,6 +111,7 @@
this.bucketCols = bucketCols;
}
+ @explain(displayName="# buckets")
public int getNumBuckets() {
return numBuckets;
}
@@ -105,6 +120,7 @@
this.numBuckets = numBuckets;
}
+ @explain(displayName="field delimiter")
public String getFieldDelim() {
return fieldDelim;
}
@@ -113,6 +129,7 @@
this.fieldDelim = fieldDelim;
}
+ @explain(displayName="collection delimiter")
public String getCollItemDelim() {
return collItemDelim;
}
@@ -121,6 +138,7 @@
this.collItemDelim = collItemDelim;
}
+ @explain(displayName="map key delimiter")
public String getMapKeyDelim() {
return mapKeyDelim;
}
@@ -129,6 +147,7 @@
this.mapKeyDelim = mapKeyDelim;
}
+ @explain(displayName="line delimiter")
public String getLineDelim() {
return lineDelim;
}
@@ -137,6 +156,7 @@
this.lineDelim = lineDelim;
}
+ @explain(displayName="comment")
public String getComment() {
return comment;
}
@@ -145,6 +165,7 @@
this.comment = comment;
}
+ @explain(displayName="isCompressed")
public boolean isCompressed() {
return isCompressed;
}
@@ -153,6 +174,7 @@
this.isCompressed = isCompressed;
}
+ @explain(displayName="location")
public String getLocation() {
return location;
}
@@ -161,6 +183,7 @@
this.location = location;
}
+ @explain(displayName="isExternal")
public boolean isExternal() {
return isExternal;
}
@@ -172,6 +195,7 @@
/**
* @return the sortCols
*/
+ @explain(displayName="sort columns")
public List<String> getSortCols() {
return sortCols;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java Thu Sep 18 10:37:59 2008
@@ -19,8 +19,10 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.File;
+import java.io.IOException;
import java.io.Serializable;
+@explain(displayName="Describe Table")
public class descTableDesc extends ddlDesc implements Serializable
{
private static final long serialVersionUID = 1L;
@@ -37,6 +39,7 @@
this.tableName = tableName;
}
+ @explain(displayName="table")
public String getTableName() {
return tableName;
}
@@ -52,6 +55,16 @@
return resFile;
}
+ @explain(displayName="result file", normalExplain=false)
+ public String getResFileString() {
+ try {
+ return getResFile().getCanonicalPath();
+ }
+ catch (IOException ioe) {
+ return "error";
+ }
+ }
+
/**
* @param resFile the resFile to set
*/
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Drop Table")
public class dropTableDesc extends ddlDesc implements Serializable
{
private static final long serialVersionUID = 1L;
@@ -36,6 +37,7 @@
/**
* @return the tableName
*/
+ @explain(displayName="table")
public String getTableName() {
return tableName;
}
Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME)
+public @interface explain {
+ String displayName() default "";
+ boolean normalExplain() default true;
+}
Added: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java?rev=696736&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java Thu Sep 18 10:37:59 2008
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.File;
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Task;
+
+public class explainWork implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private File resFile;
+ private List<Task<? extends Serializable>> rootTasks;
+ private String astStringTree;
+ boolean extended;
+
+ public explainWork() { }
+
+ public explainWork(File resFile,
+ List<Task<? extends Serializable>> rootTasks,
+ String astStringTree,
+ boolean extended) {
+ this.resFile = resFile;
+ this.rootTasks = rootTasks;
+ this.astStringTree = astStringTree;
+ this.extended = extended;
+ }
+
+ public File getResFile() {
+ return resFile;
+ }
+
+ public void setResFile(File resFile) {
+ this.resFile = resFile;
+ }
+
+ public List<Task<? extends Serializable>> getRootTasks() {
+ return rootTasks;
+ }
+
+ public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+ this.rootTasks = rootTasks;
+ }
+
+ public String getAstStringTree() {
+ return astStringTree;
+ }
+
+ public void setAstStringTree(String astStringTree) {
+ this.astStringTree = astStringTree;
+ }
+
+ public boolean getExtended() {
+ return extended;
+ }
+
+ public void setExtended(boolean extended) {
+ this.extended = extended;
+ }
+}
+
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java Thu Sep 18 10:37:59 2008
@@ -21,6 +21,7 @@
import java.io.Serializable;
+@explain(displayName="Explosion")
public class explosionDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String fieldName;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java Thu Sep 18 10:37:59 2008
@@ -44,4 +44,10 @@
public String toString() {
return "Column[" + column + "]";
}
+
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ return getColumn();
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java Thu Sep 18 10:37:59 2008
@@ -50,4 +50,18 @@
return "Const " + typeInfo.toString() + " " + value;
}
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ if (value == null) {
+ return "null";
+ }
+
+ if (typeInfo.getPrimitiveClass() == String.class) {
+ return "'" + value.toString() + "'";
+ }
+ else {
+ return value.toString();
+ }
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java Thu Sep 18 10:37:59 2008
@@ -38,5 +38,13 @@
this.typeInfo = typeInfo;
}
-
+ public String getExprString() {
+ assert(false);
+ return null;
+ }
+
+ @explain(displayName="type")
+ public String getTypeString() {
+ return typeInfo.getTypeString();
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java Thu Sep 18 10:37:59 2008
@@ -50,5 +50,10 @@
public String toString() {
return this.desc.toString() + "." + this.fieldName;
}
-
+
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ return this.desc.getExprString() + "." + this.fieldName;
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java Thu Sep 18 10:37:59 2008
@@ -24,6 +24,8 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.TypeInfo;
/**
@@ -79,4 +81,56 @@
sb.append(")");
return sb.toString();
}
+
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ FunctionInfo fI = FunctionRegistry.getInfo(UDFClass);
+ StringBuilder sb = new StringBuilder();
+
+ if (fI.getOpType() == FunctionInfo.OperatorType.PREFIX ||
+ fI.isAggFunction()) {
+ sb.append(fI.getDisplayName());
+ if (!fI.isOperator()) {
+ sb.append("(");
+ }
+ else {
+ sb.append(" ");
+ }
+
+ boolean first = true;
+ for(exprNodeDesc chld: children) {
+ if (!first) {
+ sb.append(", ");
+ }
+ first = false;
+
+ sb.append(chld.getExprString());
+ }
+
+ if(!fI.isOperator()) {
+ sb.append(")");
+ }
+ }
+ else if (fI.getOpType() == FunctionInfo.OperatorType.INFIX) {
+ // assert that this has only 2 children
+ assert(children.size() == 2);
+ sb.append("(");
+ sb.append(children.get(0).getExprString());
+ sb.append(" ");
+ sb.append(fI.getDisplayName());
+ sb.append(" ");
+ sb.append(children.get(1).getExprString());
+ sb.append(")");
+ }
+ else if (fI.getOpType() == FunctionInfo.OperatorType.POSTFIX) {
+ // assert for now as there should be no such case
+ assert(children.size() == 1);
+ sb.append(children.get(0).getExprString());
+ sb.append(" ");
+ sb.append(fI.getDisplayName());
+ }
+
+ return sb.toString();
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeIndexDesc.java Thu Sep 18 10:37:59 2008
@@ -50,5 +50,10 @@
public String toString() {
return this.desc.toString() + "[" + this.index + "]";
}
-
+
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ return this.desc.getExprString() + "[" + this.index.getExprString() + "]";
+ }
}
\ No newline at end of file
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java Thu Sep 18 10:37:59 2008
@@ -23,6 +23,8 @@
import org.apache.hadoop.hive.ql.parse.TypeInfo;
public class exprNodeNullDesc extends exprNodeDesc implements Serializable {
+
+ private static final long serialVersionUID = 1L;
public exprNodeNullDesc() {
super(new TypeInfo(Void.class));
@@ -32,4 +34,9 @@
return null;
}
+ @explain(displayName="expr")
+ @Override
+ public String getExprString() {
+ return "null";
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Extract")
public class extractDesc implements Serializable {
private static final long serialVersionUID = 1L;
private exprNodeDesc col;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="File Output Operator")
public class fileSinkDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String dirName;
@@ -33,12 +34,16 @@
this.dirName = dirName;
this.tableInfo = tableInfo;
}
+
+ @explain(displayName="directory", normalExplain=false)
public String getDirName() {
return this.dirName;
}
public void setDirName(final String dirName) {
this.dirName = dirName;
}
+
+ @explain(displayName="table")
public tableDesc getTableInfo() {
return this.tableInfo;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Filter Operator")
public class filterDesc implements Serializable {
private static final long serialVersionUID = 1L;
private org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate;
@@ -28,6 +29,7 @@
final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
this.predicate = predicate;
}
+ @explain(displayName="predicate")
public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() {
return this.predicate;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Forward")
public class forwardDesc implements Serializable {
private static final long serialVersionUID = 1L;
@SuppressWarnings("nls")
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java Thu Sep 18 10:37:59 2008
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.plan;
+@explain(displayName="Group By Operator")
public class groupByDesc implements java.io.Serializable {
/** Group-by Mode:
* COMPLETE: complete 1-phase aggregation: aggregate, evaluate
@@ -42,15 +43,33 @@
public Mode getMode() {
return this.mode;
}
+ @explain(displayName="mode")
+ public String getModeString() {
+ switch(mode) {
+ case COMPLETE:
+ return "complete";
+ case PARTIAL1:
+ return "partial1";
+ case PARTIAL2:
+ return "partial2";
+ case HASH:
+ return "hash";
+ }
+
+ return "unknown";
+ }
public void setMode(final Mode mode) {
this.mode = mode;
}
+ @explain(displayName="keys")
public java.util.ArrayList<exprNodeDesc> getKeys() {
return this.keys;
}
public void setKeys(final java.util.ArrayList<exprNodeDesc> keys) {
this.keys = keys;
}
+
+ @explain(displayName="")
public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> getAggregators() {
return this.aggregators;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java Thu Sep 18 10:37:59 2008
@@ -77,4 +77,33 @@
public void setType(final int type) {
this.type = type;
}
+
+ @explain
+ public String getJoinCondString() {
+ StringBuilder sb = new StringBuilder();
+
+ switch(type) {
+ case joinDesc.INNER_JOIN:
+ sb.append("Inner Join ");
+ break;
+ case joinDesc.FULL_OUTER_JOIN:
+ sb.append("Outer Join ");
+ break;
+ case joinDesc.LEFT_OUTER_JOIN:
+ sb.append("Left Outer Join");
+ break;
+ case joinDesc.RIGHT_OUTER_JOIN:
+ sb.append("Right Outer Join");
+ break;
+ default:
+ sb.append("Unknown Join");
+ break;
+ }
+
+ sb.append(left);
+ sb.append(" to ");
+ sb.append(right);
+
+ return sb.toString();
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java Thu Sep 18 10:37:59 2008
@@ -22,12 +22,15 @@
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
import java.util.Map;
/**
* Join operator Descriptor implementation.
*
*/
+@explain(displayName="Join Operator")
public class joinDesc implements Serializable {
private static final long serialVersionUID = 1L;
public static final int INNER_JOIN = 0;
@@ -67,6 +70,35 @@
return this.exprs;
}
+ @explain(displayName="condition expressions")
+ public Map<Byte, String> getExprsStringMap() {
+ if (getExprs() == null) {
+ return null;
+ }
+
+ LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();
+
+ for(Map.Entry<Byte, ArrayList<exprNodeDesc>> ent: getExprs().entrySet()) {
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ if (ent.getValue() != null) {
+ for(exprNodeDesc expr: ent.getValue()) {
+ if (!first) {
+ sb.append(" ");
+ }
+
+ first = false;
+ sb.append("{");
+ sb.append(expr.getExprString());
+ sb.append("}");
+ }
+ }
+ ret.put(ent.getKey(), sb.toString());
+ }
+
+ return ret;
+ }
+
public void setExprs(final Map<Byte, ArrayList<exprNodeDesc>> exprs) {
this.exprs = exprs;
}
@@ -79,6 +111,20 @@
this.noOuterJoin = noOuterJoin;
}
+ @explain(displayName="condition map")
+ public List<joinCond> getCondsList() {
+ if (conds == null) {
+ return null;
+ }
+
+ ArrayList<joinCond> l = new ArrayList<joinCond>();
+ for(joinCond cond: conds) {
+ l.add(cond);
+ }
+
+ return l;
+ }
+
public joinCond[] getConds() {
return this.conds;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java Thu Sep 18 10:37:59 2008
@@ -30,6 +30,8 @@
this.sourceDir = sourceDir;
}
+
+ @explain(displayName="source", normalExplain=false)
public String getSourceDir() {
return this.sourceDir;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java Thu Sep 18 10:37:59 2008
@@ -36,12 +36,16 @@
this.targetDir = targetDir;
this.isDfsDir = isDfsDir;
}
+
+ @explain(displayName="destination")
public String getTargetDir() {
return this.targetDir;
}
public void setTargetDir(final String targetDir) {
this.targetDir=targetDir;
}
+
+ @explain(displayName="hdfs directory")
public boolean getIsDfsDir() {
return this.isDfsDir;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java Thu Sep 18 10:37:59 2008
@@ -48,14 +48,15 @@
this(sourceDir, table, partitionSpec, true);
}
-
-
+ @explain(displayName="table")
public tableDesc getTable() {
return this.table;
}
public void setTable(final org.apache.hadoop.hive.ql.plan.tableDesc table) {
this.table = table;
}
+
+ @explain(displayName="partition")
public HashMap<String, String> getPartitionSpec() {
return this.partitionSpec;
}
@@ -63,6 +64,7 @@
this.partitionSpec = partitionSpec;
}
+ @explain(displayName="replace")
public boolean getReplace() {
return replace;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java Thu Sep 18 10:37:59 2008
@@ -24,6 +24,7 @@
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
+@explain(displayName="Map Reduce")
public class mapredWork implements Serializable {
private static final long serialVersionUID = 1L;
private String command;
@@ -31,18 +32,22 @@
// use LinkedHashMap to make sure the iteration order is
// deterministic, to ease testing
private LinkedHashMap<String,ArrayList<String>> pathToAliases;
+
private LinkedHashMap<String,partitionDesc> pathToPartitionInfo;
+
private HashMap<String,Operator<? extends Serializable>> aliasToWork;
// map<->reduce interface
// schema of the map-reduce 'key' object - this is homogeneous
private schemaDesc keySchema;
+
// schema of the map-reduce 'val' object - this is heterogeneous
private HashMap<String,schemaDesc> aliasToSchema;
-
private Operator<?> reducer;
+
private Integer numReduceTasks;
+
private boolean needsTagging;
public mapredWork() { }
@@ -70,18 +75,24 @@
public void setCommand(final String command) {
this.command = command;
}
+
+ @explain(displayName="Path -> Alias", normalExplain=false)
public LinkedHashMap<String,ArrayList<String>> getPathToAliases() {
return this.pathToAliases;
}
public void setPathToAliases(final LinkedHashMap<String,ArrayList<String>> pathToAliases) {
this.pathToAliases = pathToAliases;
}
+
+ @explain(displayName="Path -> Partition", normalExplain=false)
public LinkedHashMap<String,partitionDesc> getPathToPartitionInfo() {
return this.pathToPartitionInfo;
}
public void setPathToPartitionInfo(final LinkedHashMap<String,partitionDesc> pathToPartitionInfo) {
this.pathToPartitionInfo = pathToPartitionInfo;
}
+
+ @explain(displayName="Alias -> Map Operator Tree")
public HashMap<String, Operator<? extends Serializable>> getAliasToWork() {
return this.aliasToWork;
}
@@ -101,12 +112,16 @@
this.aliasToSchema = aliasToSchema;
}
+ @explain(displayName="Reduce Operator Tree")
public Operator<?> getReducer() {
return this.reducer;
}
+
public void setReducer(final Operator<?> reducer) {
this.reducer = reducer;
}
+
+ @explain(displayName="# Reducers")
public Integer getNumReduceTasks() {
return this.numReduceTasks;
}
@@ -174,6 +189,7 @@
setAliases();
}
+ @explain(displayName="Needs Tagging", normalExplain=false)
public boolean getNeedsTagging() {
return this.needsTagging;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java Thu Sep 18 10:37:59 2008
@@ -21,7 +21,7 @@
import java.util.*;
import java.io.*;
-
+@explain(displayName="Move Operator")
public class moveWork implements Serializable {
private static final long serialVersionUID = 1L;
private List<loadTableDesc> loadTableWork;
@@ -34,12 +34,15 @@
this.loadTableWork = loadTableWork;
this.loadFileWork = loadFileWork;
}
+ @explain(displayName="tables")
public List<loadTableDesc> getLoadTableWork() {
return this.loadTableWork;
}
public void setLoadTableWork(final List<loadTableDesc> loadTableWork) {
this.loadTableWork = loadTableWork;
}
+
+ @explain(displayName="files")
public List<loadFileDesc> getLoadFileWork() {
return this.loadFileWork;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java Thu Sep 18 10:37:59 2008
@@ -19,11 +19,8 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
-import java.util.*;
-
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.metadata.Partition;
+@explain(displayName="Partition")
public class partitionDesc implements Serializable {
private static final long serialVersionUID = 1L;
private tableDesc table;
@@ -35,12 +32,16 @@
this.table = table;
this.partSpec = partSpec;
}
+
+ @explain(displayName="")
public tableDesc getTableDesc() {
return this.table;
}
public void setTableDesc(final tableDesc table) {
this.table = table;
}
+
+ @explain(displayName="partition values")
public java.util.LinkedHashMap<String, String> getPartSpec() {
return this.partSpec;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Reduce Output Operator")
public class reduceSinkDesc implements Serializable {
private static final long serialVersionUID = 1L;
// these are the expressions that go into the reduce key
@@ -56,6 +57,7 @@
this.numPartitionFields = numPartitionFields;
}
+ @explain(displayName="key expressions")
public java.util.ArrayList<exprNodeDesc> getKeyCols() {
return this.keyCols;
}
@@ -64,6 +66,7 @@
this.keyCols=keyCols;
}
+ @explain(displayName="value expressions")
public java.util.ArrayList<exprNodeDesc> getValueCols() {
return this.valueCols;
}
@@ -72,12 +75,15 @@
this.valueCols=valueCols;
}
+ @explain(displayName="# partition fields")
public int getNumPartitionFields() {
return this.numPartitionFields;
}
public void setNumPartitionFields(int numPartitionFields) {
this.numPartitionFields = numPartitionFields;
}
+
+ @explain(displayName="tag")
public int getTag() {
return this.tag;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Transform Operator")
public class scriptDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String scriptCmd;
@@ -33,12 +34,16 @@
this.scriptCmd = scriptCmd;
this.scriptOutputInfo = scriptOutputInfo;
}
+
+ @explain(displayName="command")
public String getScriptCmd() {
return this.scriptCmd;
}
public void setScriptCmd(final String scriptCmd) {
this.scriptCmd=scriptCmd;
}
+
+ @explain(displayName="output info")
public tableDesc getScriptOutputInfo() {
return this.scriptOutputInfo;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java Thu Sep 18 10:37:59 2008
@@ -20,6 +20,7 @@
import java.io.Serializable;
+@explain(displayName="Select Operator")
public class selectDesc implements Serializable {
private static final long serialVersionUID = 1L;
private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList;
@@ -28,6 +29,7 @@
final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
this.colList = colList;
}
+ @explain(displayName="expressions")
public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> getColList() {
return this.colList;
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java Thu Sep 18 10:37:59 2008
@@ -19,8 +19,10 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.File;
+import java.io.IOException;
import java.io.Serializable;
+@explain(displayName="Show Tables")
public class showTablesDesc extends ddlDesc implements Serializable
{
private static final long serialVersionUID = 1L;
@@ -46,6 +48,7 @@
/**
* @return the pattern
*/
+ @explain(displayName="pattern")
public String getPattern() {
return pattern;
}
@@ -64,6 +67,15 @@
return resFile;
}
+ @explain(displayName="result file", normalExplain=false)
+ public String getResFileString() {
+ try {
+ return getResFile().getCanonicalPath();
+ }
+ catch (IOException ioe) {
+ return "error";
+ }
+ }
/**
* @param resFile the resFile to set
*/
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java Thu Sep 18 10:37:59 2008
@@ -23,9 +23,6 @@
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.hive.serde.SerDe;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
public class tableDesc implements Serializable {
private static final long serialVersionUID = 1L;
@@ -64,6 +61,8 @@
public void setOutputFileFormatClass(final Class<? extends OutputFormat> outputFileFormatClass) {
this.outputFileFormatClass=outputFileFormatClass;
}
+
+ @explain(displayName="properties", normalExplain=false)
public java.util.Properties getProperties() {
return this.properties;
}
@@ -73,6 +72,7 @@
/**
* @return the serdeClassName
*/
+ @explain(displayName="serde")
public String getSerdeClassName() {
return this.serdeClassName;
}
@@ -83,7 +83,18 @@
this.serdeClassName = serdeClassName;
}
+ @explain(displayName="name")
public String getTableName() {
return this.properties.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
}
+
+ @explain(displayName="input format")
+ public String getInputFileFormatClassName() {
+ return getInputFileFormatClass().getName();
+ }
+
+ @explain(displayName="output format")
+ public String getOutputFileFormatClassName() {
+ return getOutputFileFormatClass().getName();
+ }
}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Thu Sep 18 10:37:59 2008
@@ -153,9 +153,9 @@
testFiles = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
logDir = conf.get("test.log.dir");
- String ow = System.getenv("overwrite");
+ String ow = System.getProperty("test.output.overwrite");
overWrite = false;
- if ((ow != null) && (ow.length() > 0)) {
+ if ((ow != null) && ow.equalsIgnoreCase("true")){
overWrite = true;
}
@@ -551,12 +551,26 @@
cmdArray[3] + " " + cmdArray[4] + " " + cmdArray[5]);
}
else {
+ System.out.println("overwritting");
// Remove any existing output
String [] cmdArray1 = new String[5];
cmdArray1[0] = "rm";
cmdArray1[1] = "-rf";
cmdArray1[2] = (new File(outDir, tname)).getPath();
System.out.println(cmdArray1[0] + " " + cmdArray1[1] + " " + cmdArray1[2]);
+
+ Process executor = Runtime.getRuntime().exec(cmdArray1);
+
+ StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
+ StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err);
+
+ outPrinter.start();
+ errPrinter.start();
+ int exitVal = executor.waitFor();
+ if (exitVal != 0) {
+ return exitVal;
+ }
+
// Capture code
cmdArray = new String[5];
cmdArray[0] = "cp";
@@ -581,11 +595,24 @@
public int checkCliDriverResults(String tname) throws Exception {
String [] cmdArray;
- cmdArray = new String[3];
- cmdArray[0] = "diff";
- cmdArray[1] = (new File(logDir, tname + ".out")).getPath();
- cmdArray[2] = (new File(outDir, tname + ".out")).getPath();
- System.out.println(cmdArray[0] + " " + cmdArray[1] + " " + cmdArray[2]);
+
+ if (!overWrite) {
+ cmdArray = new String[5];
+ cmdArray[0] = "diff";
+ cmdArray[1] = "-I";
+ cmdArray[2] = "\\|\\(tmp/hive-.*\\)";
+ cmdArray[3] = (new File(logDir, tname + ".out")).getPath();
+ cmdArray[4] = (new File(outDir, tname + ".out")).getPath();
+ System.out.println(cmdArray[0] + " " + cmdArray[1] + " " + cmdArray[2] + " " +
+ cmdArray[3] + " " + cmdArray[4]);
+ }
+ else {
+ cmdArray = new String[3];
+ cmdArray[0] = "cp";
+ cmdArray[1] = (new File(logDir, tname + ".out")).getPath();
+ cmdArray[2] = (new File(outDir, tname + ".out")).getPath();
+ System.out.println(cmdArray[0] + " " + cmdArray[1] + " " + cmdArray[2]);
+ }
Process executor = Runtime.getRuntime().exec(cmdArray);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java Thu Sep 18 10:37:59 2008
@@ -111,7 +111,7 @@
// evaluate on row
op.process(cr);
- Map<Enum, Long> results = op.getStats();
+ Map<Enum<?>, Long> results = op.getStats();
assertEquals(results.get(FilterOperator.Counter.FILTERED), Long.valueOf(0));
assertEquals(results.get(FilterOperator.Counter.PASSED), Long.valueOf(1));
System.out.println("Filter Operator ok");
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Thu Sep 18 10:37:59 2008
@@ -150,8 +150,8 @@
exprNodeDesc desc = new exprNodeFuncDesc(
TypeInfo.getPrimitiveTypeInfo(Boolean.class),
- UDFRegistry.getUDFClass("<"),
- UDFRegistry.getUDFMethod("<", true, String.class, Number.class),
+ FunctionRegistry.getUDFClass("<"),
+ FunctionRegistry.getUDFMethod("<", true, String.class, Number.class),
children
);
return new filterDesc(desc);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestJEXL.java Thu Sep 18 10:37:59 2008
@@ -68,7 +68,7 @@
int basetimes = 100000;
JexlContext jc = JexlHelper.createContext();
- jc.getVars().put("__udf__concat", UDFRegistry.getUDFClass("concat").newInstance());
+ jc.getVars().put("__udf__concat", FunctionRegistry.getUDFClass("concat").newInstance());
measureSpeed("1 + 2",
basetimes * 100,
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Thu Sep 18 10:37:59 2008
@@ -85,7 +85,7 @@
op.process(oner);
}
- Map<Enum, Long> results = op.getStats();
+ Map<Enum<?>, Long> results = op.getStats();
assertEquals(results.get(FilterOperator.Counter.FILTERED), Long.valueOf(4));
assertEquals(results.get(FilterOperator.Counter.PASSED), Long.valueOf(1));
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/cast1.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,9 @@
CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 INT, c7 INT);
+
+EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86;
+
FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86;
+
select dest1.* FROM dest1;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1.q Thu Sep 18 10:37:59 2008
@@ -1,3 +1,8 @@
CREATE TABLE dest1(key INT, value DOUBLE);
+
+EXPLAIN
FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key;
+
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key;
+
SELECT dest1.* FROM dest1;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key STRING, c1 INT, c2 STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1);
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1), count(DISTINCT substr(src.value,4)), concat(substr(src.key,0,1),sum(substr(src.value,4))) GROUP BY substr(src.key,0,1);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby3.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,4)), avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4));
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT sum(substr(src.value,4)), avg(substr(src.value,4)), avg(DISTINCT substr(src.value,4)), max(substr(src.value,4)), min(substr(src.value,4));
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby4.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(c1 STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1) GROUP BY substr(src.key,0,1);
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,0,1) GROUP BY substr(src.key,0,1);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby5.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,11 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, sum(substr(src.value,4))
+FROM src
+GROUP BY src.key;
+
INSERT OVERWRITE TABLE dest1
SELECT src.key, sum(substr(src.value,4))
FROM src
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(c1 STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,9 @@
CREATE TABLE TEST1(A INT, B FLOAT);
+
+EXPLAIN
+DESCRIBE TEST1;
+
DESCRIBE TEST1;
+
DROP TABLE TEST1;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input10.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input10.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input10.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input10.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,9 @@
CREATE TABLE TEST10(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING);
+
+EXPLAIN
+DESCRIBE TEST10;
+
DESCRIBE TEST10;
+
DROP TABLE TEST10;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100;
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input12.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input12.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input12.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input12.q Thu Sep 18 10:37:59 2008
@@ -2,6 +2,12 @@
CREATE TABLE dest2(key INT, value STRING);
CREATE TABLE dest3(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200;
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input13.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input13.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input13.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input13.q Thu Sep 18 10:37:59 2008
@@ -2,6 +2,13 @@
CREATE TABLE dest2(key INT, value STRING);
CREATE TABLE dest3(key INT, value STRING) PARTITIONED BY(ds STRING, hr STRING);
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300
+INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
+
FROM src
INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,14 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue)
+ USING '/bin/cat'
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
+
FROM (
FROM src
SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue)
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,7 @@
+EXPLAIN
CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
+
DESCRIBE TEST15;
+
DROP TABLE TEST15;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2.q Thu Sep 18 10:37:59 2008
@@ -7,3 +7,6 @@
SHOW TABLES;
DROP TABLE TEST2b;
+EXPLAIN
+SHOW TABLES;
+
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q Thu Sep 18 10:37:59 2008
@@ -3,8 +3,12 @@
CREATE TABLE TEST3b(A ARRAY<INT>, B FLOAT, C MAP<FLOAT, INT>);
DESCRIBE TEST3b;
SHOW TABLES;
+EXPLAIN
+ALTER TABLE TEST3b ADD COLUMNS (X FLOAT);
ALTER TABLE TEST3b ADD COLUMNS (X FLOAT);
DESCRIBE TEST3b;
+EXPLAIN
+ALTER TABLE TEST3b RENAME TO TEST3c;
ALTER TABLE TEST3b RENAME TO TEST3c;
DESCRIBE TEST3c;
SHOW TABLES;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,6 @@
CREATE TABLE INPUT4(KEY STRING, VALUE STRING);
+EXPLAIN
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4;
DROP TABLE INPUT4;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input5.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input5.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input5.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input5.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,14 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM (
+ FROM src_thrift
+ SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) AS (tkey, tvalue)
+ USING '/bin/cat'
+ CLUSTER BY tkey
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
+
FROM (
FROM src_thrift
SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring) AS (tkey, tvalue)
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input6.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input6.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key STRING, value STRING);
+EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null;
+
FROM src1
INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input7.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input7.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input7.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input7.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(c1 DOUBLE, c2 INT);
+EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key;
+
FROM src1
INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input8.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input8.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input8.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input8.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE);
+EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL;
+
FROM src1
INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input9.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input9.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input9.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input9.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(value STRING, key INT);
+EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL;
+
FROM src1
INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING);
+EXPLAIN
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12';
+
FROM srcpart
INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12';
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest4_sequencefile(key INT, value STRING) STORED AS COMPRESSED;
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value;
+
FROM src
INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
+
FROM src_thrift
INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl1.q Thu Sep 18 10:37:59 2008
@@ -1,4 +1,8 @@
+EXPLAIN
CREATE TABLE INPUTDDL1(key INT, value STRING);
+
+CREATE TABLE INPUTDDL1(key INT, value STRING);
+
SELECT INPUTDDL1.* from INPUTDDL1;
-DROP TABLE INPUTDDL1;
+DROP TABLE INPUTDDL1;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl2.q Thu Sep 18 10:37:59 2008
@@ -1,3 +1,5 @@
+EXPLAIN
+CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING);
CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds DATETIME, country STRING);
DESCRIBE INPUTDDL2;
DROP TABLE INPUTDDL2;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl3.q Thu Sep 18 10:37:59 2008
@@ -1,3 +1,5 @@
+EXPLAIN
+CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
DESCRIBE INPUTDDL3;
DROP TABLE INPUTDDL3;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join1.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value;
+
FROM src src1 JOIN src src2 ON (src1.key = src2.key)
INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join2.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join2.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value;
+
FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join3.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join3.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join3.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,9 @@
CREATE TABLE dest1(key INT, value STRING);
+EXPLAIN
+FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value;
+
FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key)
INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join4.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join4.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,20 @@
CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING);
+EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4;
+
FROM (
FROM
(
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join5.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join5.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join5.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join5.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,20 @@
CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING);
+EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ RIGHT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4;
+
FROM (
FROM
(
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join6.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join6.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,6 @@
CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING);
+EXPLAIN
FROM (
FROM
(
@@ -14,4 +15,19 @@
) c
INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4;
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4;
+
+
SELECT dest1.* FROM dest1;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join7.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join7.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join7.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join7.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,25 @@
CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING);
+EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ FULL OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ LEFT OUTER JOIN
+ (
+ FROM src src3 SELECT src3.key AS c5, src3.value AS c6 WHERE src3.key > 20 and src3.key < 25
+ ) c
+ ON (a.c1 = c.c5)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6;
+
FROM (
FROM
(
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join8.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join8.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join8.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join8.q Thu Sep 18 10:37:59 2008
@@ -1,5 +1,20 @@
CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING);
+EXPLAIN
+FROM (
+ FROM
+ (
+ FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+ ) a
+ LEFT OUTER JOIN
+ (
+ FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL;
+
FROM (
FROM
(
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample1.q Thu Sep 18 10:37:59 2008
@@ -1,6 +1,11 @@
CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING);
-- no input pruning, no sample filter
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s
+WHERE s.ds='2008-04-08' and s.hr='11';
+
INSERT OVERWRITE TABLE dest1 SELECT s.*
FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1) s
WHERE s.ds='2008-04-08' and s.hr='11';
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample2.q Thu Sep 18 10:37:59 2008
@@ -2,6 +2,10 @@
-- input pruning, no sample filter
-- default table sample columns
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2) s;
+
INSERT OVERWRITE TABLE dest1 SELECT s.*
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2) s;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample4.q Thu Sep 18 10:37:59 2008
@@ -2,6 +2,10 @@
-- bucket column is the same as table sample
-- No need for sample filter
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
+
INSERT OVERWRITE TABLE dest1 SELECT s.*
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample5.q Thu Sep 18 10:37:59 2008
@@ -1,6 +1,10 @@
CREATE TABLE dest1(key INT, value STRING);
-- no input pruning, sample filter
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
+
INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample6.q Thu Sep 18 10:37:59 2008
@@ -1,6 +1,10 @@
CREATE TABLE dest1(key INT, value STRING);
-- both input pruning and sample filter
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s;
+
INSERT OVERWRITE TABLE dest1 SELECT s.*
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/sample7.q Thu Sep 18 10:37:59 2008
@@ -1,6 +1,11 @@
CREATE TABLE dest1(key INT, value STRING);
-- both input pruning and sample filter
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.*
+FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
+WHERE s.key > 100;
+
INSERT OVERWRITE TABLE dest1 SELECT s.*
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s
WHERE s.key > 100;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq.q?rev=696736&r1=696735&r2=696736&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq.q Thu Sep 18 10:37:59 2008
@@ -1,3 +1,9 @@
+EXPLAIN
+FROM (
+ FROM src select src.* WHERE src.key < 100
+) unioninput
+INSERT OVERWRITE DIRECTORY '../../../../build/contrib/hive/ql/test/data/warehouse/union.out' SELECT unioninput.*;
+
FROM (
FROM src select src.* WHERE src.key < 100
) unioninput