You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ol...@apache.org on 2008/10/31 21:24:35 UTC
svn commit: r709555 - in /hadoop/pig/branches/types: ./
src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/
src/org/apache/pig/impl/logicalLayer/
src/org/apache/pig/impl/logicalLayer/parser/ src/org/apache/pig/impl/logi...
Author: olga
Date: Fri Oct 31 13:24:34 2008
New Revision: 709555
URL: http://svn.apache.org/viewvc?rev=709555&view=rev
Log:
PIG-505: fix for using map elements in expressions
Modified:
hadoop/pig/branches/types/CHANGES.txt
hadoop/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java
hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOForEach.java
hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOProject.java
hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt
hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java
hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/validators/TypeCheckingVisitor.java
hadoop/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java
hadoop/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java
hadoop/pig/branches/types/test/org/apache/pig/test/TestTypeCheckingValidator.java
hadoop/pig/branches/types/test/org/apache/pig/test/utils/dotGraph/LogicalPlanLoader.java
Modified: hadoop/pig/branches/types/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/CHANGES.txt?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/CHANGES.txt (original)
+++ hadoop/pig/branches/types/CHANGES.txt Fri Oct 31 13:24:34 2008
@@ -306,3 +306,4 @@
PIG-497: problems with UTF8 handling in BinStorage (pradeepk via olgan)
+ PIG-505: working with map elements (sms via olgan)
Modified: hadoop/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java Fri Oct 31 13:24:34 2008
@@ -138,7 +138,13 @@
}
try {
- res.result = load.bytesToInteger(dba.get());
+ if(null != load) {
+ res.result = load.bytesToInteger(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to int." + " castToType: " + castToType + " name: " + DataType.findTypeName(castToType);
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Integer");
}
@@ -265,7 +271,13 @@
}
try {
- res.result = load.bytesToLong(dba.get());
+ if(null != load) {
+ res.result = load.bytesToLong(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to long.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Long");
}
@@ -387,7 +399,13 @@
}
try {
- res.result = load.bytesToDouble(dba.get());
+ if(null != load) {
+ res.result = load.bytesToDouble(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to double.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Double");
}
@@ -508,7 +526,13 @@
}
try {
- res.result = load.bytesToFloat(dba.get());
+ if(null != load) {
+ res.result = load.bytesToFloat(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to float.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Float");
}
@@ -631,7 +655,13 @@
}
try {
- res.result = load.bytesToCharArray(dba.get());
+ if(null != load) {
+ res.result = load.bytesToCharArray(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to string.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to CharArray");
}
@@ -742,7 +772,13 @@
}
try {
- res.result = load.bytesToTuple(dba.get());
+ if(null != load) {
+ res.result = load.bytesToTuple(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to tuple.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Tuple");
}
@@ -820,7 +856,13 @@
}
try {
- res.result = load.bytesToBag(dba.get());
+ if(null != load) {
+ res.result = load.bytesToBag(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to bag.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to DataBag");
}
@@ -898,7 +940,13 @@
}
try {
- res.result = load.bytesToMap(dba.get());
+ if(null != load) {
+ res.result = load.bytesToMap(dba.get());
+ } else {
+ String msg = "Received a bytearray from the UDF. Cannot determine how to convert the bytearray to map.";
+ log.error(msg);
+ throw new ExecException(msg);
+ }
} catch (IOException e) {
log.error("Error while casting from ByteArray to Map");
}
Modified: hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOForEach.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOForEach.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOForEach.java (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOForEach.java Fri Oct 31 13:24:34 2008
@@ -273,7 +273,7 @@
newFs.setParent(null, op);
} else {
for(Schema.FieldSchema ufs: userDefinedSchema.getFields()) {
- QueryParser.SchemaUtils.setFieldSchemaDefaultType(ufs, DataType.BYTEARRAY);
+ Schema.FieldSchema.setFieldSchemaDefaultType(ufs, DataType.BYTEARRAY);
newFs = new Schema.FieldSchema(ufs);
fss.add(newFs);
newFs.setParent(null, op);
Modified: hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOProject.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOProject.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOProject.java (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOProject.java Fri Oct 31 13:24:34 2008
@@ -205,7 +205,7 @@
//TODO
//the type of the operator will be unknown. when type checking is in place
//add the type of the operator as a parameter to the fieldschema creation
- mFieldSchema = new Schema.FieldSchema(null, expressionOperator.getSchema(), DataType.TUPLE);
+ mFieldSchema = new Schema.FieldSchema(expressionOperator.getAlias(), expressionOperator.getSchema(), DataType.TUPLE);
mFieldSchema.setParent(null, expressionOperator);
//mFieldSchema = new Schema.FieldSchema(expressionOperator.getAlias(), expressionOperator.getSchema());
}
Modified: hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt Fri Oct 31 13:24:34 2008
@@ -524,25 +524,6 @@
}
log.trace("Exiting attachPlan");
}
-
- public static class SchemaUtils {
- public static void setSchemaDefaultType(Schema s, byte t) {
- if(null == s) return;
- for(Schema.FieldSchema fs: s.getFields()) {
- setFieldSchemaDefaultType(fs, t);
- }
- }
-
- public static void setFieldSchemaDefaultType(Schema.FieldSchema fs, byte t) {
- if(null == fs) return;
- if(DataType.NULL == fs.type) {
- fs.type = t;
- }
- if(DataType.isSchemaType(fs.type)) {
- setSchemaDefaultType(fs.schema, t);
- }
- }
- }
}
@@ -852,7 +833,7 @@
}
{
(
- ( op = NestedExpr(lp) [ <AS> "(" schema = TupleSchema() ")" {SchemaUtils.setSchemaDefaultType(schema, DataType.BYTEARRAY); op.setSchema(schema);} ] )
+ ( op = NestedExpr(lp) [ <AS> "(" schema = TupleSchema() ")" {Schema.setSchemaDefaultType(schema, DataType.BYTEARRAY); op.setSchema(schema);} ] )
| op = BaseExpr(lp)
)
{log.trace("Exiting Expr"); return op;}
@@ -918,7 +899,7 @@
(
LOOKAHEAD(2) "(" schema = TupleSchema() ")"
{
- SchemaUtils.setSchemaDefaultType(schema, DataType.BYTEARRAY);
+ Schema.setSchemaDefaultType(schema, DataType.BYTEARRAY);
op.setSchema(schema);
log.debug("Load as schema" + schema);
}
@@ -953,7 +934,7 @@
(
LOOKAHEAD(2) "(" schema = TupleSchema() ")"
{
- SchemaUtils.setSchemaDefaultType(schema, DataType.BYTEARRAY);
+ Schema.setSchemaDefaultType(schema, DataType.BYTEARRAY);
op.setSchema(schema);
log.debug("Stream as schema()"+ schema);
}
Modified: hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java Fri Oct 31 13:24:34 2008
@@ -518,6 +518,7 @@
otherTakesAliasPrecedence, allowMergeableTypes);
} else {
mergedSubSchema = otherFs.schema;
+ setSchemaDefaultType(mergedSubSchema, DataType.BYTEARRAY);
}
// create the merged field
try {
@@ -528,6 +529,22 @@
}
return mergedFs;
}
+
+ /**
+ * Recursively set NULL type to the specified type
+ * @param fs the field schema whose NULL type has to be set
+ * @param t the specified type
+ */
+ public static void setFieldSchemaDefaultType(Schema.FieldSchema fs, byte t) {
+ if(null == fs) return;
+ if(DataType.NULL == fs.type) {
+ fs.type = t;
+ }
+ if(DataType.isSchemaType(fs.type)) {
+ setSchemaDefaultType(fs.schema, t);
+ }
+ }
+
private boolean isNullOrUnknownType(FieldSchema fs) {
return (fs.type == DataType.NULL || fs.type == DataType.UNKNOWN);
@@ -1421,6 +1438,18 @@
return new Schema(outputList) ;
}
+ /**
+ * Recursively set NULL type to the specified type in a schema
+ * @param schema the schema whose NULL type has to be set
+ * @param t the specified type
+ */
+ public static void setSchemaDefaultType(Schema s, byte t) {
+ if(null == s) return;
+ for(Schema.FieldSchema fs: s.getFields()) {
+ FieldSchema.setFieldSchemaDefaultType(fs, t);
+ }
+ }
+
}
Modified: hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/validators/TypeCheckingVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/validators/TypeCheckingVisitor.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/validators/TypeCheckingVisitor.java (original)
+++ hadoop/pig/branches/types/src/org/apache/pig/impl/logicalLayer/validators/TypeCheckingVisitor.java Fri Oct 31 13:24:34 2008
@@ -1500,7 +1500,7 @@
binCond.setType(lhsType);
}
else {
- String msg = "Unsupported input type for BinCond: lhs = " + lhsType + "; rhs = " + rhsType;
+ String msg = "Unsupported input type for BinCond: lhs = " + DataType.findTypeName(lhsType) + "; rhs = " + DataType.findTypeName(rhsType);
msgCollector.collect(msg, MessageType.Error) ;
throw new VisitorException(msg) ;
}
@@ -1616,13 +1616,6 @@
if(inputType == DataType.BYTEARRAY) {
try {
LoadFunc loadFunc = getLoadFunc(cast.getExpression());
- if((null == loadFunc) && (expectedType != DataType.BYTEARRAY)) {
- String msg = "Internal error. Could not resolve load function to use for casting from " +
- DataType.findTypeName(inputType) + " to " +
- DataType.findTypeName(expectedType) + ". Found null.";
- msgCollector.collect(msg, MessageType.Error);
- throw new VisitorException(msg);
- }
cast.setLoadFunc(loadFunc);
} catch (FrontendException fee) {
String msg = "Cannot resolve load function to use for casting from " +
@@ -1900,19 +1893,6 @@
checkInnerPlan(comparisonPlan) ;
-
- /*
- try {
- System.err.println("Filter inner plan typechecked");
- LOPrinter lv = new LOPrinter(System.err, comparisonPlan);
- lv.visit();
- System.err.println();
- } catch (Exception e) {
- System.err.println(e.getMessage());
- e.printStackTrace();
- }
- */
-
byte innerCondType = comparisonPlan.getLeaves().get(0).getType() ;
if (innerCondType != DataType.BOOLEAN) {
String msg = "Filter's condition must evaluate to boolean. Found: " + DataType.findTypeName(innerCondType);
@@ -2605,7 +2585,7 @@
MultiMap<String, LoadFunc> loadFuncMap = new MultiMap<String, LoadFunc>();
if(op instanceof ExpressionOperator) {
if(op instanceof LOUserFunc) {
- throw new FrontendException("Found a user defined function. Cannot determine the load function to use");
+ return null;
}
Schema.FieldSchema fs = ((ExpressionOperator)op).getFieldSchema();
Modified: hadoop/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java (original)
+++ hadoop/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java Fri Oct 31 13:24:34 2008
@@ -262,6 +262,44 @@
}
}
+ static public class MapUDF extends EvalFunc<Map<Object, Object>> {
+ @Override
+ public Map<Object, Object> exec(Tuple input) throws IOException {
+
+ TupleFactory tupleFactory = TupleFactory.getInstance();
+ ArrayList<Object> objList = new ArrayList<Object>();
+ objList.add(new Integer(1));
+ objList.add(new Double(1.0));
+ objList.add(new Float(1.0));
+ objList.add(new String("World!"));
+ Tuple tuple = tupleFactory.newTuple(objList);
+
+ BagFactory bagFactory = BagFactory.getInstance();
+ DataBag bag = bagFactory.newDefaultBag();
+ bag.add(tuple);
+
+ Map<Object, Object> mapInMap = new HashMap<Object, Object>();
+ mapInMap.put("int", new Integer(10));
+ mapInMap.put("float", new Float(10.0));
+
+ Map<Object, Object> myMap = new HashMap<Object, Object>();
+ myMap.put("string", new String("Hello"));
+ myMap.put("int", new Integer(1));
+ myMap.put("long", new Long(1));
+ myMap.put("float", new Float(1.0));
+ myMap.put("double", new Double(1.0));
+ myMap.put("dba", new DataByteArray(new String("bytes").getBytes()));
+ myMap.put("map", mapInMap);
+ myMap.put("tuple", tuple);
+ myMap.put("bag", bag);
+ return myMap;
+ }
+
+ public Schema outputSchema(Schema input) {
+ return new Schema(new Schema.FieldSchema(null, DataType.MAP));
+ }
+ }
+
@Test
public void testBagFunctionWithFlattening() throws Exception{
@@ -707,4 +745,76 @@
assertEquals("wendyξ", t.get(0));
}
+
+ public void testMapUDF() throws Exception{
+ int LOOP_COUNT = 2;
+ File tmpFile = File.createTempFile("test", "txt");
+ PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
+ Random r = new Random();
+ for(int i = 0; i < LOOP_COUNT; i++) {
+ for(int j=0;j<LOOP_COUNT;j+=2){
+ ps.println(i+"\t"+j);
+ ps.println(i+"\t"+j);
+ }
+ }
+ ps.close();
+
+ String tmpOutputFile = FileLocalizer.getTemporaryPath(null,
+ pigServer.getPigContext()).toString();
+ pigServer.registerQuery("A = LOAD '" + Util.generateURI(tmpFile.toString()) + "';");
+ pigServer.registerQuery("B = foreach A generate " + MapUDF.class.getName() + "($0) as mymap;"); //the argument does not matter
+ String query = "C = foreach B {"
+ + "generate (double)mymap#'double' as d, (long)mymap#'long' + (float)mymap#'float' as float_sum, CONCAT((chararray) mymap#'string', ' World!'), mymap#'int' * 10, (bag{tuple()}) mymap#'bag' as mybag, (tuple()) mymap#'tuple' as mytuple, (map[])mymap#'map' as mapInMap, mymap#'dba' as dba;"
+ + "};";
+
+ pigServer.registerQuery(query);
+ Iterator<Tuple> iter = pigServer.openIterator("C");
+ if(!iter.hasNext()) fail("No output found");
+ int numIdentity = 0;
+ while(iter.hasNext()){
+ Tuple t = iter.next();
+ assertEquals(1.0, (Double)t.get(0), 0.01);
+ assertEquals(2.0, (Float)t.get(1), 0.01);
+ assertTrue(((String)t.get(2)).equals("Hello World!"));
+ assertEquals(new Integer(10), (Integer)t.get(3));
+ assertEquals(1, ((DataBag)t.get(4)).size());
+ assertEquals(4, ((Tuple)t.get(5)).size());
+ assertEquals(2, ((Map<Object, Object>)t.get(6)).size());
+ assertEquals(DataByteArray.class, t.get(7).getClass());
+ assertEquals(8, t.size());
+ ++numIdentity;
+ }
+ assertEquals(LOOP_COUNT * LOOP_COUNT, numIdentity);
+ }
+
+ public void testMapUDFFail() throws Exception{
+ int LOOP_COUNT = 2;
+ File tmpFile = File.createTempFile("test", "txt");
+ PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
+ Random r = new Random();
+ for(int i = 0; i < LOOP_COUNT; i++) {
+ for(int j=0;j<LOOP_COUNT;j+=2){
+ ps.println(i+"\t"+j);
+ ps.println(i+"\t"+j);
+ }
+ }
+ ps.close();
+
+ String tmpOutputFile = FileLocalizer.getTemporaryPath(null,
+ pigServer.getPigContext()).toString();
+ pigServer.registerQuery("A = LOAD '" + Util.generateURI(tmpFile.toString()) + "';");
+ pigServer.registerQuery("B = foreach A generate " + MapUDF.class.getName() + "($0) as mymap;"); //the argument does not matter
+ String query = "C = foreach B {"
+ + "generate mymap#'dba' * 10;"
+ + "};";
+
+ pigServer.registerQuery(query);
+ try {
+ Iterator<Tuple> iter = pigServer.openIterator("C");
+ fail("Error expected.");
+ } catch (Exception e) {
+ e.getMessage().contains("Cannot determine");
+ }
+ }
+
}
Modified: hadoop/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java (original)
+++ hadoop/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java Fri Oct 31 13:24:34 2008
@@ -1706,7 +1706,7 @@
ByteArrayInputStream stream = new ByteArrayInputStream(schemaString.getBytes()) ;
QueryParser queryParser = new QueryParser(stream) ;
Schema schema = queryParser.TupleSchema() ;
- QueryParser.SchemaUtils.setSchemaDefaultType(schema, defaultType);
+ Schema.setSchemaDefaultType(schema, defaultType);
return schema;
}
Modified: hadoop/pig/branches/types/test/org/apache/pig/test/TestTypeCheckingValidator.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/test/org/apache/pig/test/TestTypeCheckingValidator.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/test/org/apache/pig/test/TestTypeCheckingValidator.java (original)
+++ hadoop/pig/branches/types/test/org/apache/pig/test/TestTypeCheckingValidator.java Fri Oct 31 13:24:34 2008
@@ -156,7 +156,7 @@
}
@Test
- public void testExpressionTypeChecking2Fail() throws Throwable {
+ public void testExpressionTypeChecking2() throws Throwable {
LogicalPlan plan = new LogicalPlan() ;
LOConst constant1 = new LOConst(plan, genNewOperatorKey(), 10) ;
constant1.setType(DataType.INTEGER) ;
@@ -194,18 +194,25 @@
CompilationMessageCollector collector = new CompilationMessageCollector() ;
TypeCheckingValidator typeValidator = new TypeCheckingValidator() ;
- try {
- typeValidator.validate(plan, collector) ;
- } catch (PlanValidationException pve) {
- // good
- }
+ typeValidator.validate(plan, collector) ;
printMessageCollector(collector) ;
printTypeGraph(plan) ;
- if (!collector.hasError()) {
- throw new Exception("Error expected during type checking") ;
+ if (collector.hasError()) {
+ throw new Exception("Error not expected during type checking") ;
}
+
+
+ // Induction check
+ assertEquals(DataType.INTEGER, sub1.getType()) ;
+ assertEquals(DataType.BOOLEAN, gt1.getType()) ;
+ assertEquals(DataType.BOOLEAN, and1.getType()) ;
+ assertEquals(DataType.BOOLEAN, not1.getType()) ;
+
+ // Cast insertion check
+ assertEquals(DataType.INTEGER, sub1.getRhsOperand().getType()) ;
+ assertEquals(DataType.LONG, gt1.getLhsOperand().getType()) ;
}
@@ -787,7 +794,7 @@
// Positive case with cast insertion
@Test
- public void testRegexTypeChecking2Fail() throws Throwable {
+ public void testRegexTypeChecking2() throws Throwable {
LogicalPlan plan = new LogicalPlan() ;
LOConst constant1 = new LOConst(plan, genNewOperatorKey(), new DataByteArray()) ;
constant1.setType(DataType.BYTEARRAY) ;
@@ -802,21 +809,25 @@
CompilationMessageCollector collector = new CompilationMessageCollector() ;
TypeCheckingValidator typeValidator = new TypeCheckingValidator() ;
- try {
- typeValidator.validate(plan, collector) ;
- } catch (PlanValidationException pve) {
- // good
- }
+ typeValidator.validate(plan, collector) ;
printMessageCollector(collector) ;
printTypeGraph(plan) ;
// After type checking
- if (!collector.hasError()) {
- throw new Exception("Error expected during type checking") ;
+ if (collector.hasError()) {
+ throw new Exception("Error not expected during type checking") ;
}
+ // check type
+ System.out.println(DataType.findTypeName(regex.getType())) ;
+ assertEquals(DataType.BOOLEAN, regex.getType()) ;
+
+ // check wiring
+ LOCast cast = (LOCast) regex.getOperand() ;
+ assertEquals(cast.getType(), DataType.CHARARRAY);
+ assertEquals(cast.getExpression(), constant1) ;
}
// Negative case
@@ -3573,6 +3584,8 @@
}
+ // The following test is commented out with PIG-505
+ /*
@Test
public void testCogroupUDFLineageFail() throws Throwable {
planTester.buildPlan("a = load 'a' using BinStorage() as (field1, field2: float, field3: chararray );") ;
@@ -3602,6 +3615,7 @@
}
}
+ */
@Test
public void testCogroupLineage2NoSchema() throws Throwable {
Modified: hadoop/pig/branches/types/test/org/apache/pig/test/utils/dotGraph/LogicalPlanLoader.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/types/test/org/apache/pig/test/utils/dotGraph/LogicalPlanLoader.java?rev=709555&r1=709554&r2=709555&view=diff
==============================================================================
--- hadoop/pig/branches/types/test/org/apache/pig/test/utils/dotGraph/LogicalPlanLoader.java (original)
+++ hadoop/pig/branches/types/test/org/apache/pig/test/utils/dotGraph/LogicalPlanLoader.java Fri Oct 31 13:24:34 2008
@@ -180,7 +180,7 @@
Schema schema = null ;
try {
schema = queryParser.TupleSchema() ;
- QueryParser.SchemaUtils.setSchemaDefaultType(schema, DataType.BYTEARRAY);
+ Schema.setSchemaDefaultType(schema, DataType.BYTEARRAY);
// set all the [NoAlias] to null
for(int i=0; i < dummyAliasCounter; i++) {