Posted to common-commits@hadoop.apache.org by zs...@apache.org on 2008/09/20 01:56:35 UTC
svn commit: r697291 [14/31] - in /hadoop/core/trunk: ./
src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/
src/contrib/hive/metastore/if/
src/contrib/hive/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/
src/contrib/hive/metastor...
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java Fri Sep 19 16:56:30 2008
@@ -26,6 +26,11 @@
// these are the expressions that go into the reduce key
private java.util.ArrayList<exprNodeDesc> keyCols;
private java.util.ArrayList<exprNodeDesc> valueCols;
+ // Describe how to serialize the key
+ private tableDesc keySerializeInfo;
+ // Describe how to serialize the value
+ private tableDesc valueSerializeInfo;
+
private int tag;
// The partition key will be the first #numPartitionFields of keyCols
@@ -36,25 +41,33 @@
public reduceSinkDesc() { }
public reduceSinkDesc
- (final java.util.ArrayList<exprNodeDesc> keyCols,
- final java.util.ArrayList<exprNodeDesc> valueCols,
- final int numPartitionFields) {
- this.keyCols = keyCols;
- this.valueCols = valueCols;
- this.tag = -1;
- this.numPartitionFields = numPartitionFields;
-}
+ (final java.util.ArrayList<exprNodeDesc> keyCols,
+ final java.util.ArrayList<exprNodeDesc> valueCols,
+ final int numPartitionFields,
+ final tableDesc keySerializeInfo,
+ final tableDesc valueSerializeInfo) {
+ this.keyCols = keyCols;
+ this.valueCols = valueCols;
+ this.tag = -1;
+ this.numPartitionFields = numPartitionFields;
+ this.keySerializeInfo = keySerializeInfo;
+ this.valueSerializeInfo = valueSerializeInfo;
+ }
public reduceSinkDesc
(java.util.ArrayList<exprNodeDesc> keyCols,
java.util.ArrayList<exprNodeDesc> valueCols,
int tag,
- int numPartitionFields) {
+ int numPartitionFields,
+ final tableDesc keySerializeInfo,
+ final tableDesc valueSerializeInfo) {
this.keyCols = keyCols;
this.valueCols = valueCols;
assert tag != -1;
this.tag = tag;
this.numPartitionFields = numPartitionFields;
+ this.keySerializeInfo = keySerializeInfo;
+ this.valueSerializeInfo = valueSerializeInfo;
}
@explain(displayName="key expressions")
@@ -90,5 +103,21 @@
public void setTag(int tag) {
this.tag = tag;
}
+
+ public tableDesc getKeySerializeInfo() {
+ return keySerializeInfo;
+ }
+
+ public void setKeySerializeInfo(tableDesc keySerializeInfo) {
+ this.keySerializeInfo = keySerializeInfo;
+ }
+
+ public tableDesc getValueSerializeInfo() {
+ return valueSerializeInfo;
+ }
+
+ public void setValueSerializeInfo(tableDesc valueSerializeInfo) {
+ this.valueSerializeInfo = valueSerializeInfo;
+ }
}
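
Note: the widened constructor now carries a tableDesc for each side of the
shuffle. A minimal sketch of a call (not part of this commit; the helper
calls mirror the TestExecDriver changes below, and the column names are
illustrative):

    ArrayList<exprNodeDesc> keyCols =
        Utilities.makeList(new exprNodeColumnDesc(String.class, "key"));
    ArrayList<exprNodeDesc> valueCols =
        Utilities.makeList(new exprNodeColumnDesc(String.class, "value"));
    // Default separator-delimited descriptors for key and value:
    tableDesc keyInfo = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key");
    tableDesc valueInfo = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "value");
    // Five-argument form; tag stays -1:
    reduceSinkDesc rsd = new reduceSinkDesc(keyCols, valueCols, 1, keyInfo, valueInfo);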
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java Fri Sep 19 16:56:30 2008
@@ -24,15 +24,20 @@
public class scriptDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String scriptCmd;
+ // Describe how to deserialize data back from user script
private tableDesc scriptOutputInfo;
+ // Describe how to serialize data out to user script
+ private tableDesc scriptInputInfo;
public scriptDesc() { }
public scriptDesc(
final String scriptCmd,
- final tableDesc scriptOutputInfo) {
+ final tableDesc scriptOutputInfo,
+ final tableDesc scriptInputInfo) {
this.scriptCmd = scriptCmd;
this.scriptOutputInfo = scriptOutputInfo;
+ this.scriptInputInfo = scriptInputInfo;
}
@explain(displayName="command")
@@ -50,4 +55,10 @@
public void setScriptOutputInfo(final tableDesc scriptOutputInfo) {
this.scriptOutputInfo = scriptOutputInfo;
}
+ public tableDesc getScriptInputInfo() {
+ return scriptInputInfo;
+ }
+ public void setScriptInputInfo(tableDesc scriptInputInfo) {
+ this.scriptInputInfo = scriptInputInfo;
+ }
}
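
Note: a sketch of the new three-argument form, mirroring the TestOperators
change later in this patch. scriptOutputInfo describes how to deserialize
rows coming back from the user script; scriptInputInfo describes how to
serialize rows fed into it:

    tableDesc scriptOutputInfo =
        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    tableDesc scriptInputInfo =
        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    scriptDesc sd = new scriptDesc("cat", scriptOutputInfo, scriptInputInfo);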
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java Fri Sep 19 16:56:30 2008
@@ -18,21 +18,20 @@
package org.apache.hadoop.hive.ql.plan;
-import java.io.File;
-import java.io.IOException;
import java.io.Serializable;
+import org.apache.hadoop.fs.Path;
@explain(displayName="Show Tables")
public class showTablesDesc extends ddlDesc implements Serializable
{
private static final long serialVersionUID = 1L;
String pattern;
- File resFile;
+ Path resFile;
/**
* @param resFile
*/
- public showTablesDesc(File resFile) {
+ public showTablesDesc(Path resFile) {
this.resFile = resFile;
pattern = null;
}
@@ -40,7 +39,7 @@
/**
* @param pattern names of tables to show
*/
- public showTablesDesc(File resFile, String pattern) {
+ public showTablesDesc(Path resFile, String pattern) {
this.resFile = resFile;
this.pattern = pattern;
}
@@ -63,23 +62,18 @@
/**
* @return the resFile
*/
- public File getResFile() {
+ public Path getResFile() {
return resFile;
}
@explain(displayName="result file", normalExplain=false)
public String getResFileString() {
- try {
- return getResFile().getCanonicalPath();
- }
- catch (IOException ioe) {
- return "error";
- }
+ return getResFile().getName();
}
/**
* @param resFile the resFile to set
*/
- public void setResFile(File resFile) {
+ public void setResFile(Path resFile) {
this.resFile = resFile;
}
}
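
Note: resFile moves from java.io.File to org.apache.hadoop.fs.Path, so the
result file can live on any Hadoop FileSystem, and the explain output now
prints just the final path component. A sketch with a hypothetical path:

    Path resFile = new Path("/tmp/hive/show_tables.res");  // hypothetical
    showTablesDesc desc = new showTablesDesc(resFile, "src*");
    String shown = desc.getResFileString();  // "show_tables.res"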
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java Fri Sep 19 16:56:30 2008
@@ -22,32 +22,33 @@
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.OutputFormat;
-import org.apache.hadoop.hive.serde.SerDe;
+import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDe;
public class tableDesc implements Serializable {
private static final long serialVersionUID = 1L;
- private Class<? extends SerDe> serdeClass;
+ private Class<? extends Deserializer> deserializerClass;
private Class<? extends InputFormat> inputFileFormatClass;
private Class<? extends OutputFormat> outputFileFormatClass;
private java.util.Properties properties;
private String serdeClassName;
public tableDesc() { }
public tableDesc(
- final Class<? extends SerDe> serdeClass,
+ final Class<? extends Deserializer> serdeClass,
final Class<? extends InputFormat> inputFileFormatClass,
final Class<? extends OutputFormat> class1,
final java.util.Properties properties) {
- this.serdeClass = serdeClass;
+ this.deserializerClass = serdeClass;
this.inputFileFormatClass = inputFileFormatClass;
this.outputFileFormatClass = class1;
this.properties = properties;
this.serdeClassName = properties.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);;
}
- public Class<? extends SerDe> getSerdeClass() {
- return this.serdeClass;
+ public Class<? extends Deserializer> getDeserializerClass() {
+ return this.deserializerClass;
}
- public void setSerdeClass(final Class<? extends SerDe> serdeClass) {
- this.serdeClass = serdeClass;
+ public void setDeserializerClass(final Class<? extends Deserializer> serdeClass) {
+ this.deserializerClass = serdeClass;
}
public Class<? extends InputFormat> getInputFileFormatClass() {
return this.inputFileFormatClass;
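
Note: the serde field is now typed against the serde2 Deserializer
interface. A sketch of building a tableDesc, under the assumption (used
elsewhere in this patch) that serde2.MetadataTypedColumnsetSerDe is a
Deserializer:

    tableDesc td = new tableDesc(
        MetadataTypedColumnsetSerDe.class,
        TextInputFormat.class,
        IgnoreKeyTextOutputFormat.class,
        Utilities.makeProperties("serialization.format", "9",
                                 "columns", "key,value"));
    Class<? extends Deserializer> dc = td.getDeserializerClass();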
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java Fri Sep 19 16:56:30 2008
@@ -25,7 +25,7 @@
public class UDFOPOr extends UDF {
- private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPOr");
+ private static Log LOG = LogFactory.getLog(UDFOPOr.class.getName());
public UDFOPOr() {
}
@@ -40,6 +40,6 @@
r = Boolean.TRUE;
}
// LOG.info("evaluate(" + a + "," + b + ")=" + r);
- return null;
+ return r;
}
}
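
Note: the second hunk is a real bug fix. evaluate() computed the
disjunction into r but then returned null unconditionally, so OR always
produced NULL. A reconstructed sketch of the corrected method (only the
hunk lines are certain; the surrounding null handling is an assumption):

    public Boolean evaluate(Boolean a, Boolean b) {
      Boolean r = Boolean.FALSE;
      if (Boolean.TRUE.equals(a) || Boolean.TRUE.equals(b)) {
        r = Boolean.TRUE;
      }
      // LOG.info("evaluate(" + a + "," + b + ")=" + r);
      return r;  // previously: return null
    }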
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Sep 19 16:56:30 2008
@@ -18,39 +18,50 @@
package org.apache.hadoop.hive.ql;
-import java.io.*;
-import java.util.*;
+import java.io.BufferedInputStream;
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.PrintStream;
+import java.io.Serializable;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import java.net.URI;
-import org.apache.hadoop.fs.FileSystem;
+import org.antlr.runtime.tree.CommonTree;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.thrift.Complex;
-import org.apache.hadoop.hive.serde.thrift.ThriftSerDe;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import com.facebook.thrift.protocol.TBinaryProtocol;
+import org.apache.hadoop.hive.serde2.ThriftDeserializer;
-import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;
public class QTestUtil {
@@ -266,9 +277,9 @@
Table srcThrift = new Table("src_thrift");
srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
- srcThrift.setSerializationLib(ThriftSerDe.class.getName());
- srcThrift.setSerializationClass(Complex.class.getName());
- srcThrift.setSerializationFormat(com.facebook.thrift.protocol.TBinaryProtocol.class.getName());
+ srcThrift.setSerializationLib(ThriftDeserializer.shortName());
+ srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
+ srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
db.createTable(srcThrift);
srcTables.add("src_thrift");
@@ -360,7 +371,7 @@
outf = new File(logDir);
outf = new File(outf, qf.getName().concat(".out"));
FileOutputStream fo = new FileOutputStream(outf);
- ss.out = new PrintStream(fo);
+ ss.out = new PrintStream(fo, true, "UTF-8");
ss.setIsSilent(true);
cliDriver = new CliDriver(ss);
SessionState.start(ss);
@@ -371,15 +382,13 @@
}
public int executeClient(String tname) {
- return cliDriver.processLine(qMap.get(tname));
+ return CliDriver.processLine(qMap.get(tname));
}
public void convertSequenceFileToTextFile() throws Exception {
// Create an instance of hive in order to create the tables
testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
db = Hive.get(conf);
- FileSystem fs = FileSystem.get(conf);
-
// Create dest4 to replace dest4_sequencefile
LinkedList<String> cols = new LinkedList<String>();
cols.add("key");
@@ -532,7 +541,7 @@
System.out.println("warehousePath = " + warehousePath.toString() + " localPath = " + localPath.toString());
if (FileSystem.getLocal(conf).exists(localPath)) {
- FileSystem.getLocal(conf).delete(localPath);
+ FileSystem.getLocal(conf).delete(localPath, true);
}
copyDirectoryToLocal(warehousePath, localPath);
@@ -600,7 +609,7 @@
cmdArray = new String[5];
cmdArray[0] = "diff";
cmdArray[1] = "-I";
- cmdArray[2] = "\\|\\(tmp/hive-.*\\)";
+ cmdArray[2] = "\\(file:\\)\\|\\(tmp/hive-.*\\)";
cmdArray[3] = (new File(logDir, tname + ".out")).getPath();
cmdArray[4] = (new File(outDir, tname + ".out")).getPath();
System.out.println(cmdArray[0] + " " + cmdArray[1] + " " + cmdArray[2] + " " +
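
Note: two of the smaller QTestUtil changes are easy to miss. The
PrintStream now gets an explicit charset (with the platform default,
test-log bytes depended on the machine running the tests), and
FileSystem.delete gains the recursive flag that replaced the deprecated
one-argument form. A sketch of the latter:

    FileSystem local = FileSystem.getLocal(conf);
    if (local.exists(localPath)) {
      // true = recursive: localPath is a directory copied from the warehouse
      local.delete(localPath, true);
    }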
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java Fri Sep 19 16:56:30 2008
@@ -1,231 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
-
-import org.apache.hadoop.hive.serde.*;
-import org.apache.hadoop.hive.serde.thrift.*;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.TypeInfo;
-import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
-
-public class TestCompositeHiveObject extends TestCase {
-
- // this is our row to test expressions on
- protected HiveObject [] r;
- protected CompositeHiveObject cr;
-
- protected void setUp() throws HiveException {
- r = new HiveObject [5];
- for(int i=0; i<5; i++) {
- ArrayList<String> data = new ArrayList<String> ();
- data.add(""+i);
- data.add(""+(i+1));
- data.add(""+(i+2));
- ColumnSet cs = new ColumnSet(data);
- try {
- r[i] = new TableHiveObject(cs, new columnsetSerDe());
- } catch (Exception e) {
- e.printStackTrace();
- throw new RuntimeException (e);
- }
- }
- cr = new CompositeHiveObject(5);
- for(int i=0; i<5; i++) {
- cr.addHiveObject(r[i]);
- }
-
- }
-
- public void testCompositeHiveObjectExpression() throws Exception {
- try {
- // get a evaluator for a simple field expression
- exprNodeColumnDesc exprDesc = new exprNodeColumnDesc(String.class, "0.col[1]");
- ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
-
- // evaluate on row
- Object ret = eval.evaluateToObject(cr);
- assertEquals(ret, "1");
-
- System.out.println("Full Expression ok");
-
- // repeat same test by evaluating on one row at a time
- exprDesc = new exprNodeColumnDesc(String.class, "0");
- eval = ExprNodeEvaluatorFactory.get(exprDesc);
-
- // evaluate on row
- HiveObject ho = eval.evaluate(cr);
- exprDesc = new exprNodeColumnDesc(String.class, "col[1]");
- eval = ExprNodeEvaluatorFactory.get(exprDesc);
- ret = eval.evaluateToObject(ho);
-
- assertEquals(ret, "1");
-
- System.out.println("Nested Expression ok");
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- }
- }
-
-
- public void testCompositeHiveObjectFilterOperator() throws Exception {
- try {
-
- exprNodeDesc f1 = new exprNodeColumnDesc(String.class, "0.col[2]");
- exprNodeDesc f2 = new exprNodeColumnDesc(String.class, "1.col[1]");
- exprNodeDesc f3 = new exprNodeColumnDesc(String.class, "2.col[0]");
- exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("==", f1, f2);
- exprNodeDesc func2 = SemanticAnalyzer.getFuncExprNodeDesc("==", f2, f3);
- exprNodeDesc func3 = SemanticAnalyzer.getFuncExprNodeDesc("&&", func1, func2);
- filterDesc filterCtx = new filterDesc(func3);
-
- // Configuration
- Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
- op.setConf(filterCtx);
-
- // runtime initialization
- op.initialize(null);
-
- // evaluate on row
- op.process(cr);
-
- Map<Enum<?>, Long> results = op.getStats();
- assertEquals(results.get(FilterOperator.Counter.FILTERED), Long.valueOf(0));
- assertEquals(results.get(FilterOperator.Counter.PASSED), Long.valueOf(1));
- System.out.println("Filter Operator ok");
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- }
- }
-
- public void testCompositeHiveObjectSelectOperator() throws Exception {
- try {
- // col1
- exprNodeDesc exprDesc1 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class),
- "col[1]");
-
- // col2
- exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col[0]");
- exprNodeDesc expr2 = new exprNodeConstantDesc("1");
- exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
-
- // select operator to project these two columns
- ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
- earr.add(exprDesc1);
- earr.add(exprDesc2);
- selectDesc selectCtx = new selectDesc(earr);
- Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
- op.setConf(selectCtx);
-
-
- // collectOperator to look at the output of the select operator
- collectDesc cd = new collectDesc (Integer.valueOf(1));
- CollectOperator cdop = (CollectOperator) OperatorFactory.get(collectDesc.class);
- cdop.setConf(cd);
- ArrayList<Operator<? extends Serializable>> nextOp = new ArrayList<Operator<? extends Serializable>> ();
- nextOp.add(cdop);
-
- op.setChildOperators(nextOp);
- op.initialize(null);
-
- // evaluate on row
- op.process(r[0]);
-
- // analyze result
- HiveObject ho = cdop.retrieve();
- exprDesc1 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class), "0");
- ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc1);
- Object ret = eval.evaluateToObject(ho);
-
- assertEquals(ret, "1");
-
- exprDesc1 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class), "1");
- eval = ExprNodeEvaluatorFactory.get(exprDesc1);
- ret = eval.evaluateToObject(ho);
- assertEquals(ret, "01");
-
- System.out.println("Select Operator ok");
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- }
- }
-
-
- public void testLabeledCompositeObject() throws Exception {
- try {
- String [] fnames = {"key", "value"};
- LabeledCompositeHiveObject cr = new LabeledCompositeHiveObject(fnames);
- cr.addHiveObject(r[0]);
- cr.addHiveObject(r[1]);
-
- // get a evaluator for a simple field expression
- exprNodeDesc exprDesc = new exprNodeColumnDesc(String.class, "value.col[2]");
- ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
-
- // evaluate on row
- Object ret = eval.evaluateToObject(cr);
- assertEquals(ret, "3");
-
- System.out.println("Labeled Composite full expression ok");
-
- // repeat same test by evaluating on one row at a time
- exprDesc = new exprNodeColumnDesc(String.class, "value");
- eval = ExprNodeEvaluatorFactory.get(exprDesc);
-
- // evaluate on row
- HiveObject ho = eval.evaluate(cr);
- exprDesc = new exprNodeColumnDesc(String.class, "col[2]");
- eval = ExprNodeEvaluatorFactory.get(exprDesc);
- ret = eval.evaluateToObject(ho);
-
- assertEquals(ret, "3");
-
- System.out.println("Labeled Composite nested Expression ok");
-
-
- exprDesc = new exprNodeColumnDesc(String.class, "invalid_field");
- eval = ExprNodeEvaluatorFactory.get(exprDesc);
- boolean gotException = false;
- try {
- ret = eval.evaluateToObject(cr);
- } catch (Exception e) {
- gotException = true;
- }
- assertEquals(gotException, true);
-
- System.out.println("Invalid field name check ok");
-
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- }
- }
-
- // TODO:
- // 1. test null hive objects
- // 2. test empty select expressions
-}
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Fri Sep 19 16:56:30 2008
@@ -36,11 +36,13 @@
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.parse.TypeInfo;
-import org.apache.hadoop.hive.ql.plan.*;
import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
/**
@@ -145,11 +147,11 @@
private filterDesc getTestFilterDesc(String column) {
ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
- children.add(new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class), column));
- children.add(new exprNodeConstantDesc(TypeInfo.getPrimitiveTypeInfo(Number.class), Long.valueOf(100)));
+ children.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
+ children.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Number.class), Long.valueOf(100)));
exprNodeDesc desc = new exprNodeFuncDesc(
- TypeInfo.getPrimitiveTypeInfo(Boolean.class),
+ TypeInfoFactory.getPrimitiveTypeInfo(Boolean.class),
FunctionRegistry.getUDFClass("<"),
FunctionRegistry.getUDFMethod("<", true, String.class, Number.class),
children
@@ -181,10 +183,8 @@
Operator<scriptDesc> op2 = OperatorFactory.get
(new scriptDesc("/bin/cat",
- new tableDesc(MetadataTypedColumnsetSerDe.class,
- null, null,
- Utilities.makeProperties("serialization.format", "9",
- "columns", "key,value"))),
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
op3);
@@ -201,7 +201,7 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "value")),
1));
@@ -225,7 +225,7 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
new exprNodeColumnDesc(String.class, "value")),
@@ -257,7 +257,7 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
Utilities.makeList
(new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0),
@@ -266,7 +266,7 @@
Utilities.addMapWork(mr, src, "a", op1);
Operator<reduceSinkDesc> op2 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
Byte.valueOf((byte)1),
@@ -287,7 +287,11 @@
(new selectDesc
(Utilities.makeList
(new exprNodeColumnDesc(String.class, Utilities.ReduceField.ALIAS.toString()),
- new exprNodeColumnDesc(String.class, Utilities.ReduceField.VALUE.toString() + ".0"))), op4);
+ new exprNodeFieldDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class),
+ new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(String.class)),
+ Utilities.ReduceField.VALUE.toString()),
+ "0"))), op4);
mr.setReducer(op5);
}
@@ -299,19 +303,17 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
new exprNodeColumnDesc(String.class, "tvalue")),
1));
Operator<scriptDesc> op0 = OperatorFactory.get
- (new scriptDesc("\'/bin/cat\'",
- new tableDesc(MetadataTypedColumnsetSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- Utilities.makeProperties("serialization.format", "9",
- "columns", "tkey,tvalue"))), op1);
+ (new scriptDesc("/bin/cat",
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
+ op1);
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
@@ -337,7 +339,7 @@
// map-side work
Operator<reduceSinkDesc> op0 = OperatorFactory.get
- (new reduceSinkDesc
+ (PlanUtils.getReduceSinkDesc
(Utilities.makeList(new exprNodeColumnDesc(String.class, "0")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "0"),
new exprNodeColumnDesc(String.class, "1")),
@@ -367,19 +369,17 @@
// map-side work
Operator<reduceSinkDesc> op1 = OperatorFactory.get
- (new reduceSinkDesc
- (Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
+ (PlanUtils.getReduceSinkDesc(
+ Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
new exprNodeColumnDesc(String.class, "tvalue")),
- 1));
+ 1));
Operator<scriptDesc> op0 = OperatorFactory.get
(new scriptDesc("\'/bin/cat\'",
- new tableDesc(org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- Utilities.makeProperties("serialization.format", "9",
- "columns", "tkey,tvalue"))), op1);
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue")),
+ op1);
Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
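
Note: the tests now obtain reduce sinks from PlanUtils.getReduceSinkDesc
rather than calling the widened constructor directly. A hypothetical
wrapper body, inferred from the call sites above (the default descriptors
chosen here are an assumption, not the committed code):

    public static reduceSinkDesc getReduceSinkDesc(
        ArrayList<exprNodeDesc> keyCols,
        ArrayList<exprNodeDesc> valueCols,
        int numPartitionFields) {
      // Assumed defaults; the real helper may pick different separators.
      return new reduceSinkDesc(keyCols, valueCols, numPartitionFields,
          getDefaultTableDesc("" + Utilities.tabCode, "key"),
          getDefaultTableDesc("" + Utilities.tabCode, "value"));
    }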
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Fri Sep 19 16:56:30 2008
@@ -22,100 +22,143 @@
import java.io.*;
import java.util.*;
-import org.apache.hadoop.hive.serde.*;
-import org.apache.hadoop.hive.serde.thrift.*;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
public class TestExpressionEvaluator extends TestCase {
// this is our row to test expressions on
- protected HiveObject r;
+ protected InspectableObject r;
- protected void setUp() {
- ArrayList<String> data = new ArrayList<String> ();
- data.add("0");
- data.add("1");
- data.add("2");
- data.add("3");
- ColumnSet cs = new ColumnSet(data);
+ ArrayList<String> col1;
+ TypeInfo col1Type;
+ ArrayList<String> cola;
+ TypeInfo colaType;
+ ArrayList<Object> data;
+ ArrayList<String> names;
+ ArrayList<TypeInfo> typeInfos;
+ TypeInfo dataType;
+
+ public TestExpressionEvaluator() {
+ col1 = new ArrayList<String> ();
+ col1.add("0");
+ col1.add("1");
+ col1.add("2");
+ col1.add("3");
+ col1Type = TypeInfoFactory.getListTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(String.class));
+ cola = new ArrayList<String> ();
+ cola.add("a");
+ cola.add("b");
+ cola.add("c");
+ colaType = TypeInfoFactory.getListTypeInfo(
+ TypeInfoFactory.getPrimitiveTypeInfo(String.class));
try {
- r = new TableHiveObject(cs, new columnsetSerDe());
- } catch (Exception e) {
+ data = new ArrayList<Object>();
+ data.add(col1);
+ data.add(cola);
+ names = new ArrayList<String>();
+ names.add("col1");
+ names.add("cola");
+ typeInfos = new ArrayList<TypeInfo>();
+ typeInfos.add(col1Type);
+ typeInfos.add(colaType);
+ dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos);
+
+ r = new InspectableObject();
+ r.o = data;
+ r.oi = TypeInfoUtils.getStandardObjectInspectorFromTypeInfo(dataType);
+ } catch (Throwable e) {
+ e.printStackTrace();
throw new RuntimeException (e);
}
}
+ protected void setUp() {
+ }
- public void testExprNodeColumnEvaluator() throws Exception {
+ public void testExprNodeColumnEvaluator() throws Throwable {
try {
// get a evaluator for a simple field expression
- exprNodeDesc exprDesc = new exprNodeColumnDesc(String.class, "col[1]");
+ exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola");
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
// evaluate on row
- Object ret = eval.evaluateToObject(r);
- assertEquals(ret, "1");
+ InspectableObject result = new InspectableObject();
+ eval.evaluate(r.o, r.oi, result);
+ assertEquals(result.o, cola);
System.out.println("ExprNodeColumnEvaluator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- public void testExprNodeFuncEvaluator() throws Exception {
+ public void testExprNodeFuncEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col[0]");
- exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col[1]");
- exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("concat", col0, col1);
+ exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
+ exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
+ exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
+ exprNodeDesc cola0desc = new exprNodeIndexDesc(coladesc, new exprNodeConstantDesc(new Integer(0)));
+ exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("concat", col11desc, cola0desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
// evaluate on row
- Object ret = eval.evaluateToObject(r);
- assertEquals(ret, "01");
+ InspectableObject result = new InspectableObject();
+ eval.evaluate(r.o, r.oi, result);
+ assertEquals(result.o, "1a");
System.out.println("ExprNodeFuncEvaluator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- public void testExprNodeConversionEvaluator() throws Exception {
+ public void testExprNodeConversionEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col0 = new exprNodeConstantDesc(String.class, null);
- exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("java.lang.double", col0);
+ exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
+ exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
+ exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(Double.class.getName(), col11desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
// evaluate on row
- Object ret = eval.evaluateToObject(r);
- assertEquals(ret, null);
+ InspectableObject result = new InspectableObject();
+ eval.evaluate(r.o, r.oi, result);
+ assertEquals(result.o, Double.valueOf("1"));
System.out.println("testExprNodeConversionEvaluator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- private static void measureSpeed(String expr, int times, ExprNodeEvaluator eval, HiveObject input, Object output) throws HiveException {
+ private static void measureSpeed(String expr, int times, ExprNodeEvaluator eval, InspectableObject input, Object standardOutput) throws HiveException {
System.out.println("Evaluating " + expr + " for " + times + " times");
// evaluate on row
+ InspectableObject output = new InspectableObject();
long start = System.currentTimeMillis();
for (int i=0; i<times; i++) {
- Object ret = eval.evaluateToObject(input);
- assertEquals(ret, output);
+ eval.evaluate(input.o, input.oi, output);
+ assertEquals(output.o, standardOutput);
}
long end = System.currentTimeMillis();
System.out.println("Evaluation finished: " + String.format("%2.3f", (end - start)*0.001) + " seconds, "
+ String.format("%2.3f", (end - start)*1000.0/times) + " seconds/million call.");
}
- public void testExprNodeSpeed() throws Exception {
+ public void testExprNodeSpeed() throws Throwable {
try {
int basetimes = 100000;
measureSpeed("1 + 2",
@@ -178,39 +221,40 @@
new exprNodeConstantDesc("4"))),
r,
"1234");
- measureSpeed("concat(col[0], col[1])",
+ exprNodeDesc constant1 = new exprNodeConstantDesc(1);
+ exprNodeDesc constant2 = new exprNodeConstantDesc(2);
+ measureSpeed("concat(col1[1], cola[1])",
basetimes * 10,
ExprNodeEvaluatorFactory.get(
- SemanticAnalyzer.getFuncExprNodeDesc("concat",
- new exprNodeColumnDesc(String.class, "col[0]"),
- new exprNodeColumnDesc(String.class, "col[1]"))),
+ SemanticAnalyzer.getFuncExprNodeDesc("concat",
+ new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1))),
r,
- "01");
- measureSpeed("concat(concat(col[0], col[1]), col[2])",
+ "1b");
+ measureSpeed("concat(concat(col1[1], cola[1]), col1[2])",
basetimes * 10,
ExprNodeEvaluatorFactory.get(
SemanticAnalyzer.getFuncExprNodeDesc("concat",
SemanticAnalyzer.getFuncExprNodeDesc("concat",
- new exprNodeColumnDesc(String.class, "col[0]"),
- new exprNodeColumnDesc(String.class, "col[1]")),
- new exprNodeColumnDesc(String.class, "col[2]"))),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2))),
r,
- "012");
- measureSpeed("concat(concat(concat(col[0], col[1]), col[2]), col[3])",
+ "1b2");
+ measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
basetimes * 10,
ExprNodeEvaluatorFactory.get(
SemanticAnalyzer.getFuncExprNodeDesc("concat",
SemanticAnalyzer.getFuncExprNodeDesc("concat",
SemanticAnalyzer.getFuncExprNodeDesc("concat",
- new exprNodeColumnDesc(String.class, "col[0]"),
- new exprNodeColumnDesc(String.class, "col[1]")),
- new exprNodeColumnDesc(String.class, "col[2]")),
- new exprNodeColumnDesc(String.class, "col[3]"))),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2)),
+ new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant2))),
r,
- "0123");
-
+ "1b2c");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
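
Note: the recurring pattern in this rewritten test is the new evaluator
contract. Instead of evaluateToObject(HiveObject) returning a bare value,
the caller passes the row object together with its ObjectInspector and
gets both the value and its inspector back. In sketch form:

    InspectableObject result = new InspectableObject();
    eval.evaluate(r.o, r.oi, result);
    Object value = result.o;              // the evaluated value
    ObjectInspector valueOI = result.oi;  // describes how to inspect it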
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Fri Sep 19 16:56:30 2008
@@ -22,54 +22,60 @@
import java.io.*;
import java.util.*;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputFormat;
-import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.hive.serde.*;
-import org.apache.hadoop.hive.serde.thrift.*;
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.TypeInfo;
import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde.thrift.columnsetSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
public class TestOperators extends TestCase {
// this is our row to test expressions on
- protected HiveObject [] r;
+ protected InspectableObject [] r;
protected void setUp() {
- r = new HiveObject [5];
+ r = new InspectableObject [5];
+ ArrayList<String> names = new ArrayList<String>(3);
+ names.add("col0");
+ names.add("col1");
+ names.add("col2");
+ ArrayList<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>(3);
+ objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
+ objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
+ objectInspectors.add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
for(int i=0; i<5; i++) {
ArrayList<String> data = new ArrayList<String> ();
data.add(""+i);
data.add(""+(i+1));
data.add(""+(i+2));
- ColumnSet cs = new ColumnSet(data);
try {
- r[i] = new TableHiveObject(cs, new columnsetSerDe());
- } catch (Exception e) {
+ r[i] = new InspectableObject();
+ r[i].o = data;
+ r[i].oi = ObjectInspectorFactory.getStandardStructObjectInspector(names, objectInspectors);
+ } catch (Throwable e) {
throw new RuntimeException (e);
}
}
}
- public void testBaseFilterOperator() throws Exception {
+ public void testBaseFilterOperator() throws Throwable {
try {
- exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col[0]");
- exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col[1]");
- exprNodeDesc col2 = new exprNodeColumnDesc(String.class, "col[2]");
+ System.out.println("Testing Filter Operator");
+ exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col0");
+ exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col1");
+ exprNodeDesc col2 = new exprNodeColumnDesc(String.class, "col2");
exprNodeDesc zero = new exprNodeConstantDesc(Number.class, Long.valueOf(0));
exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(">", col2, col1);
- System.out.println("func1 = " + func1);
exprNodeDesc func2 = SemanticAnalyzer.getFuncExprNodeDesc("==", col0, zero);
- System.out.println("func2 = " + func2);
exprNodeDesc func3 = SemanticAnalyzer.getFuncExprNodeDesc("&&", func1, func2);
assert(func3 != null);
filterDesc filterCtx = new filterDesc(func3);
@@ -81,12 +87,14 @@
// runtime initialization
op.initialize(null);
- for(HiveObject oner: r) {
- op.process(oner);
+ for(InspectableObject oner: r) {
+ op.process(oner.o, oner.oi);
}
Map<Enum<?>, Long> results = op.getStats();
+ System.out.println("filtered = " + results.get(FilterOperator.Counter.FILTERED));
assertEquals(results.get(FilterOperator.Counter.FILTERED), Long.valueOf(4));
+ System.out.println("passed = " + results.get(FilterOperator.Counter.PASSED));
assertEquals(results.get(FilterOperator.Counter.PASSED), Long.valueOf(1));
/*
@@ -96,21 +104,22 @@
*/
System.out.println("Filter Operator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- public void testFileSinkOperator() throws Exception {
+ public void testFileSinkOperator() throws Throwable {
try {
+ System.out.println("Testing FileSink Operator");
// col1
- exprNodeDesc exprDesc1 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class),
- "col[1]");
+ exprNodeDesc exprDesc1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class),
+ "col1");
// col2
ArrayList<exprNodeDesc> exprDesc2children = new ArrayList<exprNodeDesc>();
- exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col[0]");
+ exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
exprNodeDesc expr2 = new exprNodeConstantDesc("1");
exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
@@ -135,26 +144,27 @@
// evaluate on row
for(int i=0; i<5; i++) {
- op.process(r[i]);
+ op.process(r[i].o, r[i].oi);
}
op.close(false);
System.out.println("FileSink Operator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- public void testScriptOperator() throws Exception {
+ public void testScriptOperator() throws Throwable {
try {
+ System.out.println("Testing Script Operator");
// col1
- exprNodeDesc exprDesc1 = new exprNodeColumnDesc(String.class, "col[1]");
+ exprNodeDesc exprDesc1 = new exprNodeColumnDesc(String.class, "col1");
// col2
- exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col[0]");
+ exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
exprNodeDesc expr2 = new exprNodeConstantDesc("1");
exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
@@ -166,15 +176,10 @@
Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
op.setConf(selectCtx);
-
// scriptOperator to echo the output of the select
- Properties p = new Properties ();
- p.setProperty(Constants.SERIALIZATION_FORMAT, "9");
- tableDesc td = new tableDesc(columnsetSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- p);
- scriptDesc sd = new scriptDesc ("cat", td);
+ tableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
+ tableDesc scriptInput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
+ scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput);
Operator<scriptDesc> sop = OperatorFactory.get(scriptDesc.class);
sop.setConf(sd);
ArrayList<Operator<? extends Serializable>> nextScriptOp = new ArrayList<Operator<? extends Serializable>> ();
@@ -199,43 +204,48 @@
// evaluate on row
for(int i=0; i<5; i++) {
- op.process(r[i]);
+ op.process(r[i].o, r[i].oi);
}
op.close(false);
+ InspectableObject io = new InspectableObject();
for(int i=0; i<5; i++) {
- HiveObject ho = cdop.retrieve();
- ColumnSet c = (ColumnSet) ho.getJavaObject();
- // System.out.println("Obtained:" + c.col.get(0) + "," + c.col.get(1) + " and wanted " + ""+(i+1) + "," + (i) + "1");
- assertEquals(c.col.get(0), ""+(i+1));
- assertEquals(c.col.get(1), (i) + "1");
+ cdop.retrieve(io);
+ System.out.println("[" + i + "] io.o=" + io.o);
+ System.out.println("[" + i + "] io.oi=" + io.oi);
+ StructObjectInspector soi = (StructObjectInspector)io.oi;
+ assert(soi != null);
+ StructField a = soi.getStructFieldRef("a");
+ StructField b = soi.getStructFieldRef("b");
+ assertEquals(""+(i+1), soi.getStructFieldData(io.o, a));
+ assertEquals((i) + "1", soi.getStructFieldData(io.o, b));
}
System.out.println("Script Operator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw e;
}
}
- public void testMapOperator() throws Exception {
+ public void testMapOperator() throws Throwable {
try {
+ System.out.println("Testing Map Operator");
// initialize configuration
Configuration hconf = new JobConf(TestOperators.class);
HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME, "hdfs:///testDir/testFile");
// initialize pathToAliases
ArrayList<String> aliases = new ArrayList<String> ();
- aliases.add("a"); aliases.add("b");
+ aliases.add("a");
+ aliases.add("b");
LinkedHashMap<String, ArrayList<String>> pathToAliases = new LinkedHashMap<String, ArrayList<String>> ();
pathToAliases.put("/testDir", aliases);
// initialize pathToTableInfo
- tableDesc td = new tableDesc(org.apache.hadoop.hive.serde.thrift.columnsetSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- new Properties ());
+ // Default: treat the table as a single column "col"
+ tableDesc td = Utilities.defaultTd;
partitionDesc pd = new partitionDesc(td, null);
LinkedHashMap<String,org.apache.hadoop.hive.ql.plan.partitionDesc> pathToPartitionInfo = new
LinkedHashMap<String,org.apache.hadoop.hive.ql.plan.partitionDesc> ();
@@ -263,22 +273,25 @@
mo.initialize(hconf);
Text tw = new Text();
+ InspectableObject io1 = new InspectableObject();
+ InspectableObject io2 = new InspectableObject();
for(int i=0; i<5; i++) {
- tw.set("" + i + "\001" + (i+1) + "\001" + (i+2));
+ String answer = "[[" + i + ", " + (i+1) + ", " + (i+2) + "]]";
+
+ tw.set("" + i + "\u0001" + (i+1) + "\u0001"+ (i+2));
mo.process((Writable)tw);
- HiveObject ho1 = cdop1.retrieve();
- HiveObject ho2 = cdop2.retrieve();
- ColumnSet c = (ColumnSet) ho1.getJavaObject();
- assertEquals(c.col.get(0) + "\001" + c.col.get(1) + "\001" + c.col.get(2),
- tw.toString());
- c = (ColumnSet) ho2.getJavaObject();
- assertEquals(c.col.get(0) + "\001" + c.col.get(1) + "\001" + c.col.get(2),
- tw.toString());
+ cdop1.retrieve(io1);
+ cdop2.retrieve(io2);
+ System.out.println("io1.o.toString() = " + io1.o.toString());
+ System.out.println("io2.o.toString() = " + io2.o.toString());
+ System.out.println("answer.toString() = " + answer.toString());
+ assertEquals(answer.toString(), io1.o.toString());
+ assertEquals(answer.toString(), io2.o.toString());
}
System.out.println("Map Operator ok");
- } catch (Exception e) {
+ } catch (Throwable e) {
e.printStackTrace();
throw (e);
}
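
Note: the script-operator assertions above also show the struct-access
idiom that replaces ColumnSet: resolve a field once through the
StructObjectInspector, then read it from each row object. Condensed:

    StructObjectInspector soi = (StructObjectInspector) io.oi;
    StructField a = soi.getStructFieldRef("a");   // resolve by field name
    Object aValue = soi.getStructFieldData(io.o, a);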
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Fri Sep 19 16:56:30 2008
@@ -26,11 +26,11 @@
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.TypeInfo;
import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.serde.thrift.columnsetSerDe;
+import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
public class TestPlan extends TestCase {
@@ -42,8 +42,8 @@
try {
// initialize a complete map reduce configuration
- exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class), F1);
- exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfo.getPrimitiveTypeInfo(String.class), F2);
+ exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F1);
+ exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F2);
exprNodeDesc filterExpr = SemanticAnalyzer.getFuncExprNodeDesc("==", expr1, expr2);
filterDesc filterCtx = new filterDesc(filterExpr);
@@ -55,10 +55,7 @@
LinkedHashMap<String, ArrayList<String>> pa = new LinkedHashMap<String, ArrayList<String>> ();
pa.put("/tmp/testfolder", aliasList);
- tableDesc tblDesc = new tableDesc(columnsetSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- new Properties());
+ tableDesc tblDesc = Utilities.defaultTd;
partitionDesc partDesc = new partitionDesc(tblDesc, null);
LinkedHashMap<String, partitionDesc> pt = new LinkedHashMap<String, partitionDesc> ();
pt.put("/tmp/testfolder", partDesc);
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java Fri Sep 19 16:56:30 2008
@@ -1,150 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
-
-import org.apache.hadoop.hive.serde.*;
-import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.io.*;
-import org.apache.hadoop.hive.utils.ByteStream;
-
-public class TestWritables extends TestCase {
-
- protected CompositeHiveObject [] r;
-
- protected void setUp() {
- try {
- r = new CompositeHiveObject [5];
- for(int i=0; i<5; i++) {
-
- r[i] = new CompositeHiveObject (3);
- for (int j=0; j < 3; j++) {
- r[i].addHiveObject(new PrimitiveHiveObject(Integer.valueOf (i-1+j)));
- }
- }
- } catch (Exception e) {
- throw new RuntimeException (e);
- }
- }
-
- public void testWritable() throws Exception {
- try {
- ByteStream.Output bos = new ByteStream.Output ();
- DataOutputStream dos = new DataOutputStream (bos);
- HiveObjectSerializer hos = new NaiiveSerializer();
- WritableHiveObject who = null;
- WritableHiveObject.setSerialFormat();
- for(int i=0; i < 5; i++) {
- who = new WritableHiveObject(i, null, hos);
- who.setHo(r[i]);
- who.write(dos);
- }
-
- ByteStream.Input bin = new ByteStream.Input(bos.getData(), 0, bos.getCount());
- DataInputStream din = new DataInputStream(bin);
- for(int i=0; i < 5; i++) {
- who.readFields(din);
- HiveObject ho = who.getHo();
- for(int j=0; j<3; j++) {
- SerDeField sdf = ho.getFieldFromExpression(""+j);
- String str = (String)ho.get(sdf).getJavaObject();
- assertEquals(str, new String(""+(i-1+j)));
- }
- assertEquals(who.getTag(), i);
- }
- System.out.println("testWritable OK");
- } catch (Exception e) {
- e.printStackTrace();
- throw (e);
- }
- }
-
-
- public void testNoTagWritable() throws Exception {
-
- try {
- ByteStream.Output bos = new ByteStream.Output ();
- DataOutputStream dos = new DataOutputStream (bos);
- HiveObjectSerializer hos = new NaiiveSerializer();
- WritableHiveObject who = new NoTagWritableHiveObject(null, hos);
- for(int i=0; i < 5; i++) {
- who.setHo(r[i]);
- who.write(dos);
- }
-
- //System.out.println(new String(bos.getData(), 0, bos.getCount(), "UTF-8"));
-
- ByteStream.Input bin = new ByteStream.Input(bos.getData(), 0, bos.getCount());
- DataInputStream din = new DataInputStream(bin);
- for(int i=0; i < 5; i++) {
- who.readFields(din);
- HiveObject ho = who.getHo();
- for(int j=0; j<3; j++) {
- SerDeField sdf = ho.getFieldFromExpression(""+j);
- String str = (String)ho.get(sdf).getJavaObject();
- assertEquals(str, new String(""+(i-1+j)));
- }
- }
- System.out.println("testNoTagWritable OK");
- } catch (Exception e) {
- e.printStackTrace();
- throw (e);
- }
- }
-
- public void testWritableComparable() throws Exception {
- try {
- ByteStream.Output bos = new ByteStream.Output ();
- DataOutputStream dos = new DataOutputStream (bos);
- HiveObjectSerializer hos = new NaiiveSerializer();
- NoTagWritableComparableHiveObject [] who = new NoTagWritableComparableHiveObject [5];
- // 3, 1, 4, 2, 0
- for(int i=0; i < 5; i++) {
- who[i] = new NoTagWritableComparableHiveObject(null, hos);
- who[i].setHo(r[((i+1)*3) % 5]);
- who[i].write(dos);
- }
-
- ByteStream.Input bin = new ByteStream.Input(bos.getData(), 0, bos.getCount());
- DataInputStream din = new DataInputStream(bin);
-
- for(int i=0; i < 5; i++) {
- who[i].readFields(din);
- }
-
- assertEquals(who[0].compareTo(who[1]) > 0, true);
- assertEquals(who[1].compareTo(who[2]) > 0, false);
- assertEquals(who[2].compareTo(who[3]) > 0, true);
- assertEquals(who[3].compareTo(who[4]) > 0, true);
- System.out.println("testWritableComparable OK");
- } catch (Exception e) {
- e.printStackTrace();
- throw (e);
- }
- }
-}
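
The removed test exercised the basic Writable contract: each object is serialized via write(DataOutput), read back via readFields(DataInput), and the round-tripped fields must compare equal to the originals. A minimal sketch of the same pattern using only stock Hadoop types (Text plus DataOutputBuffer/DataInputBuffer; illustrative only, not code from this commit):

    import java.io.IOException;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.Text;

    public class WritableRoundTrip {
      public static void main(String[] args) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        Text original = new Text("hello");
        original.write(out);                       // serialize into the buffer

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());  // rewind over the same bytes
        Text copy = new Text();
        copy.readFields(in);                       // deserialize

        if (!original.equals(copy)) {
          throw new AssertionError("round trip lost data");
        }
      }
    }
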
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Fri Sep 19 16:56:30 2008
@@ -27,17 +27,19 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.DB;
-import org.apache.hadoop.hive.metastore.api.Constants;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.thrift.Complex;
-import org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe;
-import org.apache.hadoop.hive.serde.thrift.ThriftSerDe;
+import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.ThriftDeserializer;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.StringUtils;
+import com.facebook.thrift.protocol.TBinaryProtocol;
+
public class TestHive extends TestCase {
private Hive hm;
private HiveConf hiveConf;
@@ -98,7 +100,8 @@
}
List<FieldSchema> partCols = new ArrayList<FieldSchema>();
- partCols.add(new FieldSchema("ds", Constants.STRING_TYPE_NAME, "partition column, date but in string format as date type is not yet supported in QL"));
+ partCols.add(new FieldSchema("ds", Constants.STRING_TYPE_NAME,
+ "partition column, date but in string format as date type is not yet supported in QL"));
tbl.setPartCols(partCols);
tbl.setNumBuckets((short) 512);
@@ -106,13 +109,12 @@
tbl.setRetention(10);
// set output format parameters (these are not supported by QL but only for demo purposes)
- tbl.setIsCompressed(false);
- tbl.setFieldDelim("1");
- tbl.setLineDelim("\n");
- tbl.setMapKeyDelim("3"); // ^D
- tbl.setCollectionItemDelim("2");
+ tbl.setSerdeParam(Constants.FIELD_DELIM, "1");
+ tbl.setSerdeParam(Constants.LINE_DELIM, "\n");
+ tbl.setSerdeParam(Constants.MAPKEY_DELIM, "3");
+ tbl.setSerdeParam(Constants.COLLECTION_DELIM, "2");
- tbl.setSerializationFormat("1");
+ tbl.setSerdeParam(Constants.FIELD_DELIM, "1");
tbl.setSerializationLib(MetadataTypedColumnsetSerDe.class.getName());
// create table
@@ -135,7 +137,7 @@
// now that URI is set correctly, set the original table's uri and then compare the two tables
tbl.setDataLocation(ft.getDataLocation());
assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
- assertEquals("Serde is not set correctly", tbl.getSerDe().getShortName(), ft.getSerDe().getShortName());
+ assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName());
} catch (HiveException e) {
e.printStackTrace();
assertTrue("Unable to fetch table correctly: " + tableName, false);
@@ -171,9 +173,9 @@
Table tbl = new Table(tableName);
tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
- tbl.setSerializationLib(ThriftSerDe.class.getName());
- tbl.setSerializationClass(Complex.class.getName());
- tbl.setSerializationFormat(com.facebook.thrift.protocol.TBinaryProtocol.class.getName());
+ tbl.setSerializationLib(ThriftDeserializer.class.getName());
+ tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
+ tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
try {
hm.createTable(tbl);
} catch (HiveException e) {
@@ -193,7 +195,7 @@
// now that URI is set correctly, set the original table's uri and then compare the two tables
tbl.setDataLocation(ft.getDataLocation());
assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
- assertEquals("Serde is not set correctly", tbl.getSerDe().getShortName(), ft.getSerDe().getShortName());
+ assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName());
} catch (HiveException e) {
System.err.println(StringUtils.stringifyException(e));
assertTrue("Unable to fetch table correctly: " + tableName, false);
@@ -208,12 +210,12 @@
private static Table createTestTable(String dbName, String tableName) throws HiveException {
Table tbl = new Table(tableName);
- tbl.getTTable().setDatabase(dbName);
+ tbl.getTTable().setDbName(dbName);
tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
- tbl.setSerializationLib(ThriftSerDe.class.getName());
- tbl.setSerializationClass(Complex.class.getName());
- tbl.setSerializationFormat(com.facebook.thrift.protocol.TBinaryProtocol.class.getName());
+ tbl.setSerializationLib(ThriftDeserializer.class.getName());
+ tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
+ tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
return tbl;
}
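
The hunks above replace the dedicated setSerialization*/delimiter setters with generic serde parameters keyed by constants from org.apache.hadoop.hive.serde.Constants. Condensed into one place, the new configuration style used by the test reads as follows (a sketch assembled from the calls above; the table name is arbitrary):

    Table tbl = new Table("some_test_table");  // name is illustrative
    tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
    tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
    tbl.setSerializationLib(ThriftDeserializer.class.getName());
    // per-serde settings now travel as key/value parameters
    tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
    tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
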
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby6.q Fri Sep 19 16:56:30 2008
@@ -1,11 +1,12 @@
-CREATE TABLE dest1(c1 STRING);
-
-EXPLAIN
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
-
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
-
-SELECT dest1.* FROM dest1;
-
+CREATE TABLE dest1(c1 STRING);
+
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
+
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1);
+
+SELECT dest1.* FROM dest1;
+
+
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input15.q Fri Sep 19 16:56:30 2008
@@ -1,6 +1,8 @@
EXPLAIN
CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
+CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
+
DESCRIBE TEST15;
DROP TABLE TEST15;
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testsequencefile.q Fri Sep 19 16:56:30 2008
@@ -1,4 +1,7 @@
-CREATE TABLE dest4_sequencefile(key INT, value STRING) STORED AS COMPRESSED;
+set mapred.output.compress=true;
+set mapred.output.compression.type=BLOCK;
+
+CREATE TABLE dest4_sequencefile(key INT, value STRING) STORED AS SEQUENCEFILE;
EXPLAIN
FROM src
@@ -7,4 +10,5 @@
FROM src
INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value;
+set mapred.output.compress=false;
SELECT dest4_sequencefile.* FROM dest4_sequencefile;
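
The query file now drives compression through job properties instead of a COMPRESSED storage clause. The same two switches can be set programmatically on a JobConf (property names taken from the query above; an illustrative sketch, not part of this commit):

    import org.apache.hadoop.mapred.JobConf;

    public class CompressedOutputConf {
      public static JobConf blockCompressedOutput() {
        JobConf conf = new JobConf();
        conf.setBoolean("mapred.output.compress", true);      // compress job output
        conf.set("mapred.output.compression.type", "BLOCK");  // block-level SequenceFile compression
        return conf;
      }
    }
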
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct1.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct1.q Fri Sep 19 16:56:30 2008
@@ -1,2 +1,2 @@
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT DISTINCT src.key, substr(src.value,4,1) GROUP BY src.key
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT src.key, substr(src.value,4,1) GROUP BY src.key
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct2.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/wrong_distinct2.q Fri Sep 19 16:56:30 2008
@@ -1,2 +1,2 @@
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT src.key, DISTINCT substr(src.value,4,1) GROUP BY src.key
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, DISTINCT substr(src.value,4,1) GROUP BY src.key
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/groupby6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/groupby6.q?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/groupby6.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/groupby6.q Fri Sep 19 16:56:30 2008
@@ -1,2 +1,2 @@
-FROM src
-INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1)
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT DISTINCT substr(src.value,4,1)
Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out?rev=697291&r1=697290&r2=697291&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out Fri Sep 19 16:56:30 2008
@@ -1,50 +1,52 @@
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ 3 (TOK_FUNCTION TOK_INT 2.0))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (TOK_COLREF src key) 86))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- src
- Filter Operator
- predicate:
- expr: (key = 86)
- type: class java.lang.Boolean
- Select Operator
- expressions:
- expr: (3 + 2)
- type: class java.lang.Integer
- expr: (3.0 + UDFToDouble(2))
- type: class java.lang.Double
- expr: (UDFToDouble(3) + 2.0)
- type: class java.lang.Double
- expr: (3.0 + 2.0)
- type: class java.lang.Double
- expr: (3 + UDFToInteger(2.0))
- type: class java.lang.Integer
- expr: UDFToBoolean(1)
- type: class java.lang.Boolean
- expr: UDFToInteger(true)
- type: class java.lang.Integer
- File Output Operator
- table:
- name: dest1
- serde: org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Move Operator
- tables:
- table:
- name: dest1
- serde: org.apache.hadoop.hive.serde.simple_meta.MetadataTypedColumnsetSerDe
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- replace:
-
-5 5.0 5.0 5.0 5 false 1
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ 3 (TOK_FUNCTION TOK_INT 2.0))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (TOK_COLREF src key) 86))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ Filter Operator
+ predicate:
+ expr: (key = 86)
+ type: Boolean
+ Select Operator
+ expressions:
+ expr: (3 + 2)
+ type: int
+ expr: (3.0 + UDFToDouble(2))
+ type: double
+ expr: (UDFToDouble(3) + 2.0)
+ type: double
+ expr: (3.0 + 2.0)
+ type: double
+ expr: (3 + UDFToInteger(2.0))
+ type: int
+ expr: UDFToBoolean(1)
+ type: Boolean
+ expr: UDFToInteger(true)
+ type: int
+ File Output Operator
+ table:
+ name: dest1
+ serde: simple_meta
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ table:
+ name: dest1
+ serde: simple_meta
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ replace:
+
+
+5 5.0 5.0 5.0 5 false 1
+
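
The rewritten plan prints primitive type names (int, double) in place of Java class names, but the arithmetic itself follows ordinary numeric promotion: an int-only sum stays int, any double operand widens the result to double, and a double-to-int conversion truncates. In plain Java the analogous expressions behave the same way (a sketch; Hive's boolean/int conversions via UDFToBoolean and UDFToInteger have no direct Java cast equivalent and are omitted):

    public class PromotionDemo {
      public static void main(String[] args) {
        System.out.println(3 + 2);          // 5    int + int    -> int
        System.out.println(3.0 + 2);        // 5.0  double + int -> double
        System.out.println(3 + 2.0);        // 5.0  int + double -> double
        System.out.println(3.0 + 2.0);      // 5.0  double + double
        System.out.println(3 + (int) 2.0);  // 5    cast truncates to int first
      }
    }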