Posted to commits@pig.apache.org by zl...@apache.org on 2017/02/24 03:34:40 UTC

svn commit: r1784224 [10/17] - in /pig/branches/spark: ./ bin/ conf/ contrib/piggybank/java/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/util/apachel...

Modified: pig/branches/spark/test/e2e/pig/tests/nightly.conf
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/tests/nightly.conf?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/tests/nightly.conf (original)
+++ pig/branches/spark/test/e2e/pig/tests/nightly.conf Fri Feb 24 03:34:37 2017
@@ -567,6 +567,7 @@ store c into ':OUTPATH:';\,
 			{
 			'num' => 9,
                         'floatpostprocess' => 1,
+                        'ignore23' => 'Cannot get results to match due to float precision; temporarily disabled',
 			'pig' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
 b = group a by name;
 c = foreach b generate group, AVG(a.gpa);
@@ -1517,8 +1518,8 @@ store i into ':OUTPATH:';\,
             {
             # Union + operators
             'num' => 12,
-            'pig' => q\a = load ':INPATH:/singlefile/studentnulltab10k' as (name:chararray, age:int, gpa:double);
-b = load ':INPATH:/singlefile/studentcolon10k' using PigStorage(':') as (name:chararray, age:int, gpa:double);
+            'pig' => q\a = load ':INPATH:/singlefile/studentnulltab10k' as (name, age:int, gpa:double);
+b = load ':INPATH:/singlefile/studentcolon10k' using PigStorage(':') as (name, age:int, gpa:double);
 c = union a, b;
 -- Exercise all expression operators --
 d = foreach c generate (name is not NULL? UPPER(name) : 'FNU LNU') as name, (age < 30 ? -1 : age) as age, (gpa is NULL ? 0.0 : ((gpa > 0.5 AND gpa < 1.0) ? 1 : gpa)) as gpa;
@@ -2185,7 +2186,7 @@ store d into ':OUTPATH:';\,
 b = order a by $0, $1, $2;
 c = limit b 100;
 store c into ':OUTPATH:';\,
-            	'sortArgs' => ['-t', '	', '-k', '1,2'],
+            	'sortArgs' => ['-t', '	', '-k', '1,3'],
 			},
 			{
 				# Make sure that limit higher than number of rows doesn't mess stuff up
@@ -2205,7 +2206,6 @@ store c into ':OUTPATH:';\,
 			},
 			{
 				'num' => 5,
-				'execonly' => 'mapred,local', #tez may pick either input as part of the optimization so cannot be tested easily
 				'pig' =>q\a = load ':INPATH:/singlefile/studenttab10k';
 b = load ':INPATH:/singlefile/votertab10k';
 a1 = foreach a generate $0, $1;
@@ -2285,21 +2285,7 @@ store d into ':OUTPATH:';\,
 				'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:double);
 b = limit a 2000;
 store b into ':OUTPATH:';\,
-                        },
-			{
-				'num' => 12,
-				'execonly' => 'tez', #Limit_5 was not able to test on tez. 
-				'pig' =>q\a = load ':INPATH:/singlefile/studenttab10k';
-b = load ':INPATH:/singlefile/studenttab10k';
-a1 = foreach a generate $0, $1;
-b1 = foreach b generate $0, $1;
-c = union a1, b1;
-d = limit c 100;
-store d into ':OUTPATH:';\,
-				'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int);
-b = limit a 100;
-store b into ':OUTPATH:';\,
-			}
+                        }
 		]
 		},
 		{
@@ -2750,41 +2736,6 @@ store c into ':OUTPATH:';\,
 			},
 		],
         },
-        {
-        'name' => 'StoreLoad',
-        'tests' => [
-            {
-            'num' => 1,
-            'floatpostprocess' => 1,
-            'delimiter' => '    ',
-            'pig' => q\
-a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:int, gpa: double);
-b = filter a by age < 25;
-c = filter a by age > 70;
-store b into ':OUTPATH:.intermediate1' using PigStorage(',');
-store c into ':OUTPATH:.intermediate2' using PigStorage(',');
-d = load ':OUTPATH:.intermediate1' using PigStorage(',') as (name:chararray, age:int, gpa: double);
-e = load ':OUTPATH:.intermediate2' using PigStorage(',') as (name:chararray, age:int, gpa: double);
-f = join d by name, e by name;
-store f into ':OUTPATH:';\,
-            'notmq' => 1,
-            },
-            {
-            # Self join
-            'num' => 2,
-            'floatpostprocess' => 1,
-            'delimiter' => '    ',
-            'pig' => q\
-a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:int, gpa: double);
-b = filter a by name == 'nick miller';
-store b into ':OUTPATH:.intermediate' using PigStorage(',');
-c = load ':OUTPATH:.intermediate' using PigStorage(',') as (name:chararray, age:int, gpa: double);
-d = join a by name, c by name;
-store d into ':OUTPATH:';\,
-            'notmq' => 1,
-            },
-        ],
-        },
 
 	{
 		'name' => 'MergeJoin',
@@ -3220,25 +3171,6 @@ e = join a by name full outer, b by name
 store e into ':OUTPATH:';\,
 
                         },
-                        # skew join with tuple key
-                        {
-                            'num' => 15,
-                            'java_params' => ['-Dpig.skewedjoin.reduce.maxtuple=100'],
-                            'pig' => q\a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
-b = load ':INPATH:/singlefile/votertab10k' as (name, age, registration, contributions);
-c = group a by (name, age);
-d = group b by (name, age);
-e = join c by $0, d by $0 using 'skewed' parallel 5;
-f = foreach e generate c::group, flatten(c::a), d::group, flatten(d::b);
-store f into ':OUTPATH:';\,
-                            'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
-b = load ':INPATH:/singlefile/votertab10k' as (name, age, registration, contributions);
-c = group a by (name, age);
-d = group b by (name, age);
-e = join c by $0, d by $0;
-f = foreach e generate c::group, flatten(c::a), d::group, flatten(d::b);
-store f into ':OUTPATH:';\
-                        }
                 ]
 
             },
@@ -4311,32 +4243,40 @@ store e into ':OUTPATH:';\,
                     # test common
                     'num' => 1,
                     'pig' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
 a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
-b = native ':MAPREDJARS:/hadoop-examples.jar' Store a into ':OUTPATH:.intermediate.1' Load ':OUTPATH:.intermediate.2' `wordcount :OUTPATH:.intermediate.1 :OUTPATH:.intermediate.2`;
+b = native ':MAPREDJARS:/hadoop-examples.jar' Store a into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' `wordcount table_testNativeMRJobSimple_input table_testNativeMRJobSimple_output`;
 store b into ':OUTPATH:';\,
                     'notmq' => 1,
                     'verify_pig_script' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
 a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
-b = mapreduce ':MAPREDJARS:/hadoop-examples.jar' Store a into ':OUTPATH:.intermediate.1' Load ':OUTPATH:.intermediate.2' `wordcount :OUTPATH:.intermediate.1 :OUTPATH:.intermediate.2`;
+b = mapreduce ':MAPREDJARS:/hadoop-examples.jar' Store a into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' `wordcount table_testNativeMRJobSimple_input table_testNativeMRJobSimple_output`;
 store b into ':OUTPATH:';\,
                     },
                     {
                     # test complex
                     'num' => 2,
                     'pig' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
 a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
 b = foreach a generate name;
 c = distinct b;
-d = native ':MAPREDJARS:/hadoop-examples.jar' Store c into ':OUTPATH:.intermediate.1' Load ':OUTPATH:.intermediate.2' as (name:chararray, count: int) `wordcount :OUTPATH:.intermediate.1 :OUTPATH:.intermediate.2`;
+d = native ':MAPREDJARS:/hadoop-examples.jar' Store c into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' as (name:chararray, count: int) `wordcount table_testNativeMRJobSimple_input table_testNativeMRJobSimple_output`;
 e = order d by name;
 store e into ':OUTPATH:';\,
                     'sortArgs' => ['-t', '	'],
                     'notmq' => 1,
                     'verify_pig_script' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
 a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
 b = foreach a generate name;
 c = distinct b;
-d = mapreduce ':MAPREDJARS:/hadoop-examples.jar' Store c into ':OUTPATH:.intermediate.1' Load ':OUTPATH:.intermediate.2' as (name:chararray, count: int) `wordcount :OUTPATH:.intermediate.1 :OUTPATH:.intermediate.2`;
+d = mapreduce ':MAPREDJARS:/hadoop-examples.jar' Store c into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' as (name:chararray, count: int) `wordcount table_testNativeMRJobSimple_input table_testNativeMRJobSimple_output`;
 e = order d by name;
 store e into ':OUTPATH:';\,
                     },
@@ -4344,8 +4284,16 @@ store e into ':OUTPATH:';\,
                     # test streaming
                     'num' => 3,
                     'pig' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
 a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
-b = mapreduce ':MAPREDJARS:/hadoop-streaming.jar' Store a into ':OUTPATH:.intermediate.1' Load ':OUTPATH:.intermediate.2' as (name:chararray, count: int) `-input :OUTPATH:.intermediate.1 -output :OUTPATH:.intermediate.2 -mapper cat -reducer wc`;
+b = mapreduce ':MAPREDJARS:/hadoop-streaming.jar' Store a into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' as (name:chararray, count: int) `-input table_testNativeMRJobSimple_input -output table_testNativeMRJobSimple_output -mapper cat -reducer wc`;
+store b into ':OUTPATH:';\,
+                    'pig23' => q\
+rmf table_testNativeMRJobSimple_input
+rmf table_testNativeMRJobSimple_output
+a = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name, age, gpa);
+b = mapreduce ':MAPREDJARS:/hadoop-0.23.0-streaming.jar' Store a into 'table_testNativeMRJobSimple_input' Load 'table_testNativeMRJobSimple_output' as (name:chararray, count: int) `-input table_testNativeMRJobSimple_input -output table_testNativeMRJobSimple_output -mapper cat -reducer wc`;
 store b into ':OUTPATH:';\,
                     'notmq' => 1,
                     },
@@ -4936,6 +4884,21 @@ a = load ':INPATH:/singlefile/allscalar1
 b = load ':INPATH:/singlefile/allscalar10k' using PigStorage() as (name:chararray, age:int, gpa:double, instate:chararray);
 C = union a, b;
 store C into ':OUTPATH:';\, 
+                },
+                {
+                    # Test Union using merge with incompatible types: int->bytearray and chararray->bytearray
+                    'num' => 8,
+                    'delimiter' => '	',
+                    'pig' => q\
+A = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:int);
+B = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:chararray);
+C = union onschema A, B;
+store C into ':OUTPATH:';\,
+                    'verify_pig_script' => q\
+A = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:bytearray);
+B = load ':INPATH:/singlefile/studenttab10k' using PigStorage() as (name:chararray, age:bytearray);
+C = union A, B;
+store C into ':OUTPATH:';\,
                 }
               ]
 
@@ -4964,6 +4927,7 @@ store C into ':OUTPATH:';\,
                 'tests' => [
                     {
                         'num' => 1,
+                        'ignore23' => 'guava version of Pig is higher than that of hadoop 23',
                         'pig' => q?register :FUNCPATH:/testudf.jar;
                                 define gm org.apache.pig.test.udf.evalfunc.GoodMonitored();
                                 a = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
@@ -5333,26 +5297,6 @@ store C into ':OUTPATH:';\,
                                 C = UNION A,B;
                                 D = filter C by name == 'alice allen';
                                 store D into ':OUTPATH:';",
-                    },{
-                        'num' => 5,
-                        'pig' => "set pig.optimizer.rules.disabled PushUpFilter;
-                                define bb BuildBloom('Hash.JENKINS_HASH', 'fixed', '128', '3');
-                                A = LOAD ':INPATH:/singlefile/studenttab10k' AS (name:chararray, age:int, gpa:double);
-                                B = filter A by name == 'alice allen';
-                                C = group B all;
-                                D = foreach C generate bb(B.name) as bloomfilter;
-                                E = LOAD ':INPATH:/singlefile/studenttab10k' AS (name:chararray, age:int, gpa:double);
-                                F = LOAD ':INPATH:/singlefile/studenttab10k' AS (name:chararray, age:int, gpa:double);
-                                G = union E, F;
-                                -- PushUpFilter is disabled to avoid filter being pushed before union
-                                H = filter G by Bloom(D.bloomfilter, name);
-                                store H into ':OUTPATH:';",
-                        'verify_pig_script' => "
-                                A = LOAD ':INPATH:/singlefile/studenttab10k' AS (name, age:int ,gpa:double);
-                                B = LOAD ':INPATH:/singlefile/studenttab10k' AS (name, age:int ,gpa:double);
-                                C = UNION A,B;
-                                D = filter C by name == 'alice allen';
-                                store D into ':OUTPATH:';",
                     }
                 ],
             },{
@@ -5693,15 +5637,13 @@ store a into ':OUTPATH:';\,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
                                     SET default_parallel 7;
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                    SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     B = rank A;
                                     C = foreach B generate rank_A,a,b,c;
                                     store C into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     C = foreach A generate rownumber,a,b,c;
                                     store C into ':OUTPATH:';
                                 \,
@@ -5710,15 +5652,13 @@ store a into ':OUTPATH:';\,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
                                     SET default_parallel 9;
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                    SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     B = rank A by b DESC,a ASC;
                                     C = foreach B generate rank_A,b,a;
                                     store C into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     C = foreach A generate rankbdaa,b,a;
                                     store C into ':OUTPATH:';
                                 \,
@@ -5727,15 +5667,13 @@ store a into ':OUTPATH:';\,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
                                     SET default_parallel 7;
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                    SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     B = rank A by c ASC,b DESC;
                                     C = foreach B generate rank_A,c,b;
                                     store C into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     C = foreach A generate rankcabd,c,b;
                                     store C into ':OUTPATH:';
                                 \,
@@ -5743,29 +5681,26 @@ store a into ':OUTPATH:';\,
                         'num' => 4,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
-                                    SET default_parallel 5;
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                    SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    SET default_parallel 25;
+                                    A = LOAD ':INPATH:/singlefile/biggish' using PigStorage(',') as (rownumber:long,idx:long,tail:bytearray);
                                     B = rank A;
                                     C = order B by rank_A;
-                                    D = foreach C generate rank_A,a,b,c;
+                                    D = foreach C generate rank_A,rownumber;
                                     store D into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
-                                    D = foreach A generate rownumber,a,b,c;
+                                    A = LOAD ':INPATH:/singlefile/biggish' using PigStorage(',') as (rownumber:long,idx:long,tail:bytearray);
+                                    D = foreach A generate idx,rownumber;
                                     store D into ':OUTPATH:';
                                 \,
                     }, {
                         'num' => 5,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
-                                    SET default_parallel 5;
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
+                                    SET default_parallel 11;
                                     SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
-                                    B = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/biggish' using PigStorage(',') as (rownumber:long,idx:long,tail:bytearray);
+                                    B = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     C = join A by rownumber, B by rownumber;
                                     D = order C by B::rankcabd,B::rankbdca,B::rankaaba;
                                     E = rank D;
@@ -5775,7 +5710,7 @@ store a into ':OUTPATH:';\,
                                     store H into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,idx:long);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,idx:long,tail:bytearray);
                                     B = foreach A generate rownumber,1;
                                     C = order B by rownumber;
                                     store C into ':OUTPATH:';
@@ -5784,16 +5719,14 @@ store a into ':OUTPATH:';\,
                         'num' => 6,
                         'execonly' => 'mapred,tez',
                         'pig' => q\
-                                    SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                    SET pig.splitCombination false;
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     split A into M if rownumber > 15, N if rownumber < 25;
                                     C = rank N;
                                     D = foreach C generate $0, a, b, c;
                                     store D into ':OUTPATH:';
                                 \,
                         'verify_pig_script' => q\
-                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+                                    A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
                                     B = filter A by rownumber < 25;
                                     D = foreach B generate rownumber, a, b, c;
                                     store D into ':OUTPATH:';
@@ -5808,16 +5741,14 @@ store a into ':OUTPATH:';\,
 						'num' => 1,
 						'execonly' => 'mapred,tez',
 						'pig' => q\
-                                                                        SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                                                        SET pig.splitCombination false;
 									SET default_parallel 9;
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									B = rank A by a ASC,b ASC DENSE;
 									C = foreach B generate rank_A,a,b;
 									store C into ':OUTPATH:';
 								\,
 						'verify_pig_script' => q\
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									C = foreach A generate rankaaba,a,b;
 									store C into ':OUTPATH:';
 								\,
@@ -5825,16 +5756,14 @@ store a into ':OUTPATH:';\,
 						'num' => 2,
 						'execonly' => 'mapred,tez',
 						'pig' => q\
-                                                                        SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                                                        SET pig.splitCombination false;
 									SET default_parallel 9;
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									B = rank A by a ASC,c DESC DENSE;
 									C = foreach B generate rank_A,a,c;
 									store C into ':OUTPATH:';
 								\,
 						'verify_pig_script' => q\
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									C = foreach A generate rankaacd,a,c;
 									store C into ':OUTPATH:';
 								\,
@@ -5842,16 +5771,14 @@ store a into ':OUTPATH:';\,
 						'num' => 3,
 						'execonly' => 'mapred,tez',
 						'pig' => q\
-                                                                        SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                                                        SET pig.splitCombination false;
 									SET default_parallel 7;
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									B = rank A by b DESC,c ASC DENSE;
 									C = foreach B generate rank_A,b,c;
 									store C into ':OUTPATH:';
 								\,
 						'verify_pig_script' => q\
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									C = foreach A generate rankbdca,b,c;
 									store C into ':OUTPATH:';
 								\,
@@ -5859,11 +5786,9 @@ store a into ':OUTPATH:';\,
 						'num' => 4,
 						'execonly' => 'mapred,tez',
 						'pig' => q\
-                                                                        SET mapreduce.input.fileinputformat.split.maxsize '300';
-                                                                        SET pig.splitCombination false;
 									SET default_parallel 7;
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
-									B = foreach A generate a,b,c;
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
+									B = foreach A generate a,b,c,tail;
 									C = rank B by a ASC,b ASC DENSE;
 									D = rank C by a ASC,c DESC DENSE;
 									E = rank D by b DESC,c ASC DENSE;
@@ -5871,7 +5796,7 @@ store a into ':OUTPATH:';\,
 									store F into ':OUTPATH:';
 								\,
 						'verify_pig_script' => q\
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									B = foreach A generate rankbdca,rankaacd,rankaaba,a,b,c;
 									store B into ':OUTPATH:';
 								\,
@@ -5880,9 +5805,8 @@ store a into ':OUTPATH:';\,
 						'execonly' => 'mapred,tez',
 						'pig' => q\
 									SET default_parallel 9;
-                                                                        SET mapreduce.input.fileinputformat.split.maxsize '300';
 									SET pig.splitCombination false;
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									B = foreach A generate a,b,c;
 									C = rank B by a ASC,b ASC DENSE;
 									D = rank B by a ASC,c DESC DENSE;
@@ -5892,7 +5816,7 @@ store a into ':OUTPATH:';\,
 									store H into ':OUTPATH:';
 								\,
 						'verify_pig_script' => q\
-									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int);
+									A = LOAD ':INPATH:/singlefile/prerank' using PigStorage(',') as (rownumber:long,rankcabd:long,rankbdaa:long,rankbdca:long,rankaacd:long,rankaaba:long,a:int,b:int,c:int,tail:bytearray);
 									C = foreach A generate rankaaba,a,b,c;
 									E = order C by a ASC,b ASC;
 									D = foreach A generate rankaacd,a,b,c;

Modified: pig/branches/spark/test/e2e/pig/tests/orc.conf
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/tests/orc.conf?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/tests/orc.conf (original)
+++ pig/branches/spark/test/e2e/pig/tests/orc.conf Fri Feb 24 03:34:37 2017
@@ -1,21 +1,3 @@
-#!/usr/bin/env perl
-############################################################################           
-#  Licensed to the Apache Software Foundation (ASF) under one or more                  
-#  contributor license agreements.  See the NOTICE file distributed with               
-#  this work for additional information regarding copyright ownership.                 
-#  The ASF licenses this file to You under the Apache License, Version 2.0             
-#  (the "License"); you may not use this file except in compliance with                
-#  the License.  You may obtain a copy of the License at                               
-#                                                                                      
-#      http://www.apache.org/licenses/LICENSE-2.0                                      
-#                                                                                      
-#  Unless required by applicable law or agreed to in writing, software                 
-#  distributed under the License is distributed on an "AS IS" BASIS,                   
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.            
-#  See the License for the specific language governing permissions and                 
-#  limitations under the License.                                                      
-                                                                                       
-###############################################################################
 $cfg = {
         'driver' => 'Pig',
         'nummachines' => 5,

Modified: pig/branches/spark/test/e2e/pig/tests/turing_jython.conf
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/tests/turing_jython.conf?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/tests/turing_jython.conf (original)
+++ pig/branches/spark/test/e2e/pig/tests/turing_jython.conf Fri Feb 24 03:34:37 2017
@@ -50,7 +50,7 @@ d = filter b by age < 50;
 e = cogroup c by (name, age), d by (name, age) ;
 f = foreach e generate flatten(c), flatten(d);
 g = group f by registration;
-h = foreach g generate group, (float) ROUND(SUM(f.d::contributions) * 100) / 100.0;
+h = foreach g generate group, SUM(f.d::contributions);
 i = order h by $1;
 store i into '$out';
 """).bind({'in1':input1,'in2':input2, 'out':output}).runSingle() 
@@ -68,7 +68,7 @@ else:
                                          e = cogroup c by (name, age), d by (name, age) ;
                                          f = foreach e generate flatten(c), flatten(d);
                                          g = group f by registration;
-                                         h = foreach g generate group, (float) ROUND(SUM(f.d::contributions) * 100) / 100.0;
+                                         h = foreach g generate group, SUM(f.d::contributions);
                                          i = order h by $1;
                                          store i into ':OUTPATH:';
 \,
@@ -92,12 +92,38 @@ hdfs = FileSystem.get(config)
                 }
                 ]
 
+                },
+                {
+                'name' => 'Jython_Embedded',
+                'tests' => [
+                        {
+                        'num' => 1
+			,'pig' => q\#!/usr/bin/python
+# JYTHON COMMENT
+from org.apache.pig.scripting import Pig
+
+P = Pig.compile("""A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa); 
+store A into ':OUTPATH:';""")
+
+Q = P.bind()
+
+result = Q.runSingle()
+
+if result.isSuccessful():
+	print "Pig job PASSED"
+
+else:
+	raise "Pig job FAILED"    
+\,
+                        'verify_pig_script' => q\A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa); 
+                                                 store A into ':OUTPATH:';\,
+                        }
+                        ]
 		}, {
 
 			'name' => 'Jython_CompileBindRun'
 		       ,'tests' => [
-				{
-                                # bind no parameters, runSingle
+				{ # bind() with no parameters, runSingle
 				'num' => 1
 				,'pig' => q\#!/usr/bin/python
 # JYTHON COMMENT
@@ -124,7 +150,7 @@ else:
 				,'delimiter' => '	'
 
 				},{
-                        # bind single input parameter
+#  	9.2 	1 	bind single input parameter and no output parameters 	 
 			'num' => 2
 			,'pig' => q\#!/usr/bin/python
 
@@ -153,7 +179,7 @@ else:
 
 #                       ,'expected_out_regex' => "Pig job PASSED"
 			},{
-                        # bind parallel execution with a multiple entries
+#  		bind parallel execution with multiple entries
 			'num' => 3
 			,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -205,7 +231,9 @@ for i in [0, 1, 2]:
 \,
 
 			},{
-                # compile pig script file with no parameters
+#  8.6 	compile pig script file with no input and no output parameters
+#12.2 	import python modules 
+#
 		'num' => 4
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -215,7 +243,6 @@ pig_script = ":TMP:/script.pig"
 pigfile = open( pig_script, 'w+')
 pigfile.write("""
 A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
--- a comment
 store A into ':OUTPATH:';
 """)
 pigfile.close()
@@ -236,7 +263,7 @@ else:
                         ,'floatpostprocess' => 1
                         ,'delimiter' => '	'
 		},{
-                # compile pig script file with parameters
+#  8.7 	compile pig script file with no input and with output parameters
 		'num' => 5
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -273,7 +300,7 @@ else:
                         ,'delimiter' => '	'
 
 		},{
-                    # results.getResults(alias) for null results
+            # 11.15   1  	 results.getResults(alias) for null results
 		    'num' => 6
 		    ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -291,7 +318,7 @@ result = P.bind().runSingle()
                                       store EMPTY into ':OUTPATH:';\
 		},
         {
-                    # bind parameters from python context
+            # bind parameters from the python context
 		    'num' => 7
 		    ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -313,7 +340,7 @@ result = P.bind().runSingle()
                                       store B into ':OUTPATH:';\
 
 		},{
-                    # bind multiple times
+            # bind multiple times
 		    'num' => 8
 		    ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -340,8 +367,56 @@ for i in [1,2,3]:
                                       B= foreach A generate age + 3;
                                       store B into ':OUTPATH:.3';\,
 
+		},
+        {
+            # invoke .run() on a non-parallel pig script
+		    'num' => 9
+		    ,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+P = Pig.compile("""
+A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
+store A into ':OUTPATH:';
+""")
+result = P.bind().run()
+\,
+             'verify_pig_script' => q\A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
+                                      store A into ':OUTPATH:';\,
+ 	    },
+        {
+#  8.6 	compile pig script file with no input and no output parameters
+#12.2 	import python modules 
+#
+		'num' => 10
+		,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+#create pig script
+pig_script = ":TMP:/script.pig"
+pigfile = open( pig_script, 'w+')
+pigfile.write("""
+A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
+-- a comment
+store A into ':OUTPATH:';
+""")
+pigfile.close()
+
+#execute pig script
+
+result = Pig.compileFromFile( pig_script ).bind().runSingle()
+
+if result.isSuccessful():
+    print "Pig job PASSED"
+else:
+    raise "Pig job FAILED"    
+\,
+
+             'verify_pig_script' => q\A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
+                                      store A into ':OUTPATH:';
+\
+                        ,'floatpostprocess' => 1
+                        ,'delimiter' => '	'
 	},{
-                # python script with parameters
                 'num' => 11
                 ,'pig_params' => ['-p', qq(loadfile='studenttab10k')],
                 ,'pig' => q\#!/usr/bin/python
@@ -366,7 +441,6 @@ else:
                         ,'floatpostprocess' => 1
                         ,'delimiter' => '       '
         },{
-                # python script with parameter file
                 'num' => 12
                 ,'pig_params' => ['-m', ":PARAMPATH:/params_3"],
                 ,'pig' => q\#!/usr/bin/python
@@ -391,7 +465,6 @@ else:
                         ,'floatpostprocess' => 1
                         ,'delimiter' => '       '
 	},{
-                # python script with command line arguments
                 'num' => 13
                 ,'additional_cmd_args' => ['studenttab10k']
                 ,'pig' => q\#!/usr/bin/python
@@ -422,7 +495,7 @@ else:
 	'name' => 'Jython_Diagnostics'
        ,'tests' => [
                 {
-                # explain() on a complex query
+#  11.23 	1 	explain() on a complex query 	 
 		'num' => 1
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -452,7 +525,7 @@ result = P.bind({'in1':input1, 'in2':inp
                        ,'rc'=> 0
 
 	}, {
-                # illustrate() on a complex query 	 
+#11.22 	1 	illustrate() on a complex query 	 
 		'num' => 2
 		,'execonly' => 'mapred,local' #TODO: PIG-3993: Illustrate is yet to be implemented in Tez
 		,'pig' => q\#!/usr/bin/python
@@ -482,7 +555,7 @@ result = P.bind({'in1':input1, 'in2':inp
                        ,'rc'=> 0
                        ,'expected_out_regex' => "A.*name:bytearray.*age:bytearray.*gpa:bytearray"
 	}, {
-                # describe() on an alias 	 
+#  11.24 	1 	describe() on an alias 	 
 		'num' => 3
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -510,7 +583,7 @@ result = P.bind({'in1':input1, 'in2':inp
                        ,'rc'=> 0
                        ,'expected_out_regex' => "A:.*{name:.*bytearray,age:.*bytearray,gpa:.*bytearray}"
 	}, {
-                # describe() on an undefined alias 
+#11.29 	1 	describe() on an undefined alias 
 		'num' => 4
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -540,7 +613,7 @@ result = P.bind({'in1':input1, 'in2':inp
 
 
 	}, {
-                # illustrate(alias) 	 
+#  11.27 	1 	illustrate(alias) 	 
 		'num' => 5
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -570,7 +643,7 @@ result = P.bind({'in1':input1, 'in2':inp
                        ,'expected_err_regex' => "ERROR 1121"
 
 	}, {
-                # explain(alias) 
+#  11.28 	1 	explain(alias) 
 		'num' => 6
 		,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -637,10 +710,14 @@ Pig.fs("-copyFromLocal :TMP:/iterator_ou
 		},
 	]
 	}, {
+# 12.2 import python modules 	 
+# 12.1 python comments 	 
+# 12.6 fs lists a file 	 
+
+
 	'name' => 'Jython_Misc'
        ,'tests' => [
                 {
-                # fs commands: lists a file 	 
 		'num' => 1
 		,'pig' => q\#!/usr/bin/python
 # JYTHON COMMENT
@@ -701,8 +778,8 @@ P.bind().runSingle()
                 'name' => 'Jython_Properties',
                 'tests' => [
                         {
-                        # check if property is passed to Pig
                         'num' => 1 
+                        ,'ignore' => 1 # This is a good test except that we can't verify it.
 		,'pig' => q\#!/usr/bin/python
 # JYTHON COMMENT
 from org.apache.pig.scripting import Pig
@@ -714,7 +791,7 @@ store A into ':OUTPATH:';""")
 Q = P.bind()
 
 prop = Properties()
-prop.put("pig.default.load.func", "wrong")
+prop.put("mapred.job.name", "friendship")
 result = Q.runSingle(prop)
 
 if result.isSuccessful():
@@ -722,8 +799,10 @@ if result.isSuccessful():
 else:
 	raise "Pig job FAILED"    
 \
-                       ,'rc'=> 6
-                       ,'expected_err_regex' => "ERROR 1070: Could not resolve wrong using imports"
+
+                        ,'sql' => "select name, age, gpa+0.00 from studenttab10k;" 
+                        ,'floatpostprocess' => 1
+                        ,'delimiter' => '	'
 
 	}
 	]
@@ -732,7 +811,7 @@ else:
                 'name' => 'Jython_Error',
                 'tests' => [
 	                {
-                                    # run a script that returns single negative result 
+                        # run a script that returns single negative result 
 			            'num' => 1
 			            ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
@@ -755,18 +834,103 @@ else:
 
 		                ,'rc' => 6
                         ,'expected_err_regex' => "ERROR 1121"
+			        },
+                    {
+                        # bind an undefined output parameter
+			            'num' => 2
+			            ,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+input= ":INPATH:/singlefile/studenttab10k"
+output = ":OUTPATH:"
+
+P = Pig.compile("""A = load '$in' as (name, age, gpa); store A into '$out';""")
+
+Q = P.bind({'in':input, 'out':bad_output}) 
+
+result = Q.runSingle()
+
+if result.isSuccessful():
+    print "Pig job PASSED"
+
+else:
+    raise "Pig job FAILED"    
+\ 
+
+                        ,'rc' => 6
+                        ,'expected_err_regex' => "name 'bad_output' is not defined"
 			        },{
-                                # compileFromFile for pig script file that does not exist throws IOException 	 
+                        # bind an undefined input parameter 	 
+		                'num' => 3
+		                ,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+input= ":INPATH:/singlefile/studenttab10k"
+output = ":OUTPATH:"
+
+P = Pig.compile("""A = load '$in' as (name, age, gpa); store A into '$out';""")
+
+Q = P.bind({'in':invalid_parameter, 'out':output}) 
+
+result = Q.runSingle()
+
+if result.isSuccessful():
+    print "Pig job PASSED"
+
+else:
+    raise "Pig job FAILED"    
+\
+
+                        ,'expected_err_regex' => "ERROR 1121"
+                       ,'rc'=> 6
+
+		            },
+                    {
+                        #  compileFromFile for pig script file that does not exist throws IOException 	 
 		                'num' => 4
 		                ,'pig' => q\#!/usr/bin/python
-import os
 from org.apache.pig.scripting import Pig
 
 # intentionally don't create pig script
 
-pig_script = ":TMP:/script.pig"
+pig_script = tmp_dir + "/script.pig"
 
-os.remove(pig_script)
+#execute pig script
+input1= ":INPATH:/singlefile/studenttab10k"
+input2= ":INPATH:/singlefile/votertab10k"
+output1= ":OUTPATH:.1"
+output2= ":OUTPATH:.2"
+
+result = Pig.compileFromFile(pig_script).bind({'in1':input1,'in2':input2, 'out1':output1, 'out2':output2 }).run()
+
+if result.isSuccessful():
+    print "Pig job PASSED"
+
+else:
+    raise "Pig job FAILED"    
+\
+
+                       ,'expected_err_regex' => "ERROR 1121"
+                       ,'rc'=> 6
+		            },
+                    {
+                        #  compileFromFile for pig script file that does not have read permissions throws IOException 	
+		                'num' => 5
+		                ,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+#create pig script
+
+pig_script = ":TMP:/script.pig"
+pigfile = open( pig_script, 'w')
+# no read permissions and file is left open until after compile statement
+pigfile.write("""
+A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
+B = load ':INPATH:/singlefile/votertab10k' as (name, age, registration, contributions);
+store A into '$out1';
+store B into '$out2';
+""")
+pigfile.close()
 
 #execute pig script
 input1= ":INPATH:/singlefile/studenttab10k"
@@ -774,9 +938,11 @@ input2= ":INPATH:/singlefile/votertab10k
 output1= ":OUTPATH:.1"
 output2= ":OUTPATH:.2"
 
-results = Pig.compileFromFile(pig_script).bind({'in1':input1,'in2':input2, 'out1':output1, 'out2':output2 }).run()
+result = Pig.compileFromFile(pig_script).bind({'in1':input1,'in2':input2, 'out1':output1, 'out2':output2 }).run()
+
+pigfile.close()
 
-if results[0].isSuccessful():
+if result.isSuccessful():
     print "Pig job PASSED"
 
 else:
@@ -811,16 +977,13 @@ else:
                        ,'expected_err_regex' => "ERROR 1121"
                     },
                     {
-                        # iter.next for an alias that is undefined 
+                        # 11.10 iter.next for an alias that is undefined 
 		                'num' => 7
 		                ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig
 
 #create pig script
 
-out1= ":OUTPATH:.1"
-out2= ":OUTPATH:.2"
-
 P = Pig.compile("""A = load ':INPATH:/singlefile/studenttab10k' as (name, age, gpa);
 B= filter A by age < 50;
 store B into '$out1';
@@ -829,10 +992,9 @@ D = filter C by name matches '^fred*';
 store D into '$out2';
 """)
 
-results = P.bind().run()
-iter = results[0].result("E").iterator()
+result = P.bind().run()
 
-if results[0].isSuccessful():
+if result.isSuccessful():
     print "Pig job PASSED"
 
 else:
@@ -848,6 +1010,30 @@ else:
                 'tests' => [
                     {
                         # sql command
+		                'num' => 1
+                                ,'java_params' => ['-Dhcat.bin=:HCATBIN:']
+		                ,'pig' => q\#!/usr/bin/python
+from org.apache.pig.scripting import Pig
+
+#create pig script
+
+Pig.sql("""sql drop table if exists pig_script_hcat_ddl_1;""")
+ret = Pig.sql("""sql create table pig_script_hcat_ddl_1(name string,
+age int,
+gpa double)
+stored as textfile;
+""")
+
+if ret==0:
+    print "SQL command PASSED"
+
+else:
+    raise "SQL command FAILED"    
+\
+                       ,'rc' => 0
+                    },
+                    {
+                        # sql command
 		                'num' => 2
 		                ,'pig' => q\#!/usr/bin/python
 from org.apache.pig.scripting import Pig

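The Jython tests above all drive the same embedding API: Pig.compile() (or Pig.compileFromFile()) parses a Pig Latin string, bind() substitutes its $-prefixed parameters, and runSingle() or run() executes the result. A minimal sketch of that lifecycle, using placeholder paths rather than the harness's :INPATH:/:OUTPATH: substitutions:

#!/usr/bin/python
from org.apache.pig.scripting import Pig

# compile() parses the Pig Latin; $in and $out are parameters
# that bind() fills in before execution
P = Pig.compile("""A = load '$in' as (name, age, gpa);
store A into '$out';""")

# placeholder paths, for illustration only
Q = P.bind({'in':'/tmp/studenttab10k', 'out':'/tmp/out'})

# runSingle() runs one job and returns a single result object
result = Q.runSingle()

if result.isSuccessful():
    print "Pig job PASSED"
else:
    raise "Pig job FAILED"

Tests that expect a failure instead assert on 'rc' and 'expected_err_regex', as in the Jython_Error block above.
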
Modified: pig/branches/spark/test/e2e/pig/tools/generate/generate_data.pl
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/e2e/pig/tools/generate/generate_data.pl?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/e2e/pig/tools/generate/generate_data.pl (original)
+++ pig/branches/spark/test/e2e/pig/tools/generate/generate_data.pl Fri Feb 24 03:34:37 2017
@@ -41,6 +41,7 @@ our @lastName = ("allen", "brown", "cars
 #	rankaacd: RANK BY a ASC , c DESC
 #	rankaaba: RANK BY a ASC , b ASC
 #	a,b,c:    values
+#	tail:     long padding value that makes the file big enough to create multiple mappers
 ############################################################################
 our @rankedTuples = (
 	"1,21,5,7,1,1,0,8,8","2,26,2,3,2,5,1,9,10","3,30,24,21,2,3,1,3,10","4,6,10,8,3,4,1,7,2",
@@ -500,10 +501,22 @@ sub getBulkCopyCmd(){
             my $randf = rand(10);
             printf HDFS "%d:%d:%d:%d:%d:%dL:%.2ff:%.2f\n", $tid, $i, $rand5, $rand100, $rand1000, $rand1000, $randf, $randf;
         }
-    } elsif ($filetype eq "ranking") {
+    }  elsif ($filetype eq "ranking") {
         for (my $i = 0; $i < $numRows; $i++) {
             my $tuple = $rankedTuples[int($i)];
-            printf HDFS "$tuple\n";
+            printf HDFS "$tuple,";
+            for my $j ( 0 .. 1000000) {
+				printf HDFS "%d",$j;
+			}
+			printf HDFS "\n";
+        }
+    } elsif ($filetype eq "biggish") {
+        for (my $i = 1; $i < $numRows; $i++) {
+            printf HDFS "$i,$i,";
+            for my $j ( 0 .. 1000) {
+				printf HDFS "%d",$j;
+            }
+            printf HDFS "\n";
         }
     } elsif ($filetype eq "utf8Student") {
         srand(3.14159 + $numRows);

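The two new generator branches above pad rows with runs of concatenated digits: "ranking" appends the digits 0 through 1000000 to every prerank tuple (a multi-megabyte tail, so the input is large enough to split across several mappers), while "biggish" writes rownumber, idx, and the digits 0 through 1000. A sketch of the same row shapes in Python, for illustration only (ranking_row and biggish_row are made-up names, not harness code):

def ranking_row(tuple_str):
    # e.g. "1,21,5,7,1,1,0,8,8" + "," + "012345...1000000"
    return tuple_str + "," + "".join(str(j) for j in range(1000001))

def biggish_row(i):
    # e.g. "1,1," + "012345...1000"
    return "%d,%d," % (i, i) + "".join(str(j) for j in range(1001))
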
Modified: pig/branches/spark/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java (original)
+++ pig/branches/spark/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java Fri Feb 24 03:34:37 2017
@@ -360,7 +360,7 @@ public class TestLoadStoreFuncLifeCycle
             // result, the number of StoreFunc instances is greater by 1 in
             // Hadoop-2.0.x.
             assertTrue("storer instantiation count increasing: " + Storer.count,
-                    Storer.count <= 5);
+                    Storer.count <= (org.apache.pig.impl.util.Utils.isHadoop2() ? 5 : 4));
         }
     }
 

Modified: pig/branches/spark/test/org/apache/pig/TestMain.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/TestMain.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/TestMain.java (original)
+++ pig/branches/spark/test/org/apache/pig/TestMain.java Fri Feb 24 03:34:37 2017
@@ -24,10 +24,8 @@ import static org.junit.Assert.assertTru
 import static org.junit.Assert.fail;
 
 import java.io.BufferedWriter;
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileWriter;
-import java.io.FileReader;
 import java.io.IOException;
 import java.util.Properties;
 
@@ -37,7 +35,6 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.parser.ParserException;
 import org.apache.pig.parser.SourceLocation;
 import org.apache.pig.test.TestPigRunner.TestNotificationListener;
-import org.apache.pig.test.Util;
 import org.apache.pig.tools.parameters.ParameterSubstitutionException;
 import org.apache.pig.tools.pigstats.PigStats;
 import org.junit.Test;
@@ -155,35 +152,6 @@ public class TestMain {
     }
 
 
-    @Test
-    public void testParseInputScript() throws Exception {
-        File input = Util.createInputFile("tmp", "",
-                new String[]{"{(1,1.0)}\ttestinputstring1",
-                        "{(2,2.0)}\ttestinputstring1",
-                        "{(3,3.0)}\ttestinputstring1",
-                        "{(4,4.0)}\ttestinputstring1"}
-        );
-        File out = new File(System.getProperty("java.io.tmpdir")+"/testParseInputScriptOut");
-        File scriptFile = Util.createInputFile("pigScript", "",
-                new String[]{"A = load '"+input.getAbsolutePath()+"' as (a:{(x:chararray, y:float)}, b:chararray);",
-                        "B = foreach A generate\n" +
-                                "    b,\n" +
-                                "    (bag{tuple(long)}) a.x as ax:{(x:long)};",
-                        "store B into '"+out.getAbsolutePath()+"';"}
-        );
-
-        Main.run(new String[]{"-x", "local", scriptFile.getAbsolutePath()}, null);
-        BufferedReader file = new BufferedReader(new FileReader(new File(out.getAbsolutePath()+"/part-m-00000")));
-        String line;
-        int count = 0;
-        while(( line = file.readLine()) != null) {
-            count++;
-        }
-        assertEquals(4,count);
-        Util.deleteDirectory(new File(out.getAbsolutePath()));
-        assertTrue(!new File(out.getAbsolutePath()).exists());
-    }
-
     public static class TestNotificationListener2 extends TestNotificationListener {
         protected boolean hadArgs = false;
         public TestNotificationListener2() {}

Modified: pig/branches/spark/test/org/apache/pig/builtin/TestAvroStorage.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/builtin/TestAvroStorage.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/builtin/TestAvroStorage.java (original)
+++ pig/branches/spark/test/org/apache/pig/builtin/TestAvroStorage.java Fri Feb 24 03:34:37 2017
@@ -709,19 +709,6 @@ public class TestAvroStorage {
     }
 
     @Test
-    public void testGroupWithRepeatedSubRecords() throws Exception {
-      final String input = basedir + "data/avro/uncompressed/recordWithRepeatedSubRecords.avro";
-      final String check = basedir + "data/avro/uncompressed/recordWithRepeatedSubRecords.avro";
-      testAvroStorage(true, basedir + "code/pig/group_test.pig",
-          ImmutableMap.of(
-              "INFILE",           input,
-              "AVROSTORAGE_OUT_2", "-f " + basedir + "schema/recordWithRepeatedSubRecords.avsc",
-              "OUTFILE",          createOutputName())
-        );
-      verifyResults(createOutputName(),check);
-    }
-
-    @Test
     public void testLoadDirectory() throws Exception {
       final String input = basedir + "data/avro/uncompressed/testdirectory";
       final String check = basedir + "data/avro/uncompressed/testDirectoryCounts.avro";

Modified: pig/branches/spark/test/org/apache/pig/builtin/TestOrcStorage.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/builtin/TestOrcStorage.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/builtin/TestOrcStorage.java (original)
+++ pig/branches/spark/test/org/apache/pig/builtin/TestOrcStorage.java Fri Feb 24 03:34:37 2017
@@ -195,7 +195,7 @@ public class TestOrcStorage {
         Reader reader = OrcFile.createReader(fs, Util.getFirstPartFile(new Path(OUTPUT1)));
         assertEquals(reader.getNumberOfRows(), 2);
 
-        RecordReader rows = reader.rows();
+        RecordReader rows = reader.rows(null);
         Object row = rows.next(null);
         StructObjectInspector soi = (StructObjectInspector)reader.getObjectInspector();
         IntWritable intWritable = (IntWritable)soi.getStructFieldData(row,
@@ -291,7 +291,7 @@ public class TestOrcStorage {
         ObjectInspector oi = orcReader.getObjectInspector();
         StructObjectInspector soi = (StructObjectInspector) oi;
 
-        RecordReader reader = orcReader.rows();
+        RecordReader reader = orcReader.rows(null);
         Object row = null;
 
         while (reader.hasNext()) {
@@ -326,9 +326,9 @@ public class TestOrcStorage {
         Reader orcReaderActual = OrcFile.createReader(fs, orcFile);
         StructObjectInspector soiActual = (StructObjectInspector) orcReaderActual.getObjectInspector();
 
-        RecordReader readerExpected = orcReaderExpected.rows();
+        RecordReader readerExpected = orcReaderExpected.rows(null);
         Object expectedRow = null;
-        RecordReader readerActual = orcReaderActual.rows();
+        RecordReader readerActual = orcReaderActual.rows(null);
         Object actualRow = null;
 
         while (readerExpected.hasNext()) {
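
The three hunks above switch from Reader.rows() to Reader.rows(null), i.e. to the older Hive ORC signature that takes an include-columns mask, where null means "read every column". A hedged sketch of the resulting read loop (the path and field access are illustrative):

    // Older Hive ORC API assumed by this branch: rows(boolean[] include).
    Reader orcReader = OrcFile.createReader(fs, new Path("/tmp/part-r-00000"));
    RecordReader rows = orcReader.rows(null);   // null => include all columns
    StructObjectInspector soi = (StructObjectInspector) orcReader.getObjectInspector();
    Object row = null;
    while (rows.hasNext()) {
        row = rows.next(row);                   // the row object may be reused
        Object field0 = soi.getStructFieldData(row, soi.getAllStructFieldRefs().get(0));
    }
    rows.close();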

Modified: pig/branches/spark/test/org/apache/pig/data/TestSchemaTuple.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/data/TestSchemaTuple.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/data/TestSchemaTuple.java (original)
+++ pig/branches/spark/test/org/apache/pig/data/TestSchemaTuple.java Fri Feb 24 03:34:37 2017
@@ -17,9 +17,9 @@
  */
 package org.apache.pig.data;
 
+import static junit.framework.Assert.assertEquals;
 import static org.apache.pig.builtin.mock.Storage.resetData;
 import static org.apache.pig.builtin.mock.Storage.tuple;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -599,33 +599,33 @@ public class TestSchemaTuple {
         Data data = resetData(pigServer);
 
         data.set("foo1",
-            tuple(0, 0),
-            tuple(1, 1),
-            tuple(2, 2),
-            tuple(3, 3),
-            tuple(4, 4),
-            tuple(5, 5),
-            tuple(6, 6),
-            tuple(7, 7),
-            tuple(8, 8),
-            tuple(9, 9)
+            tuple(0),
+            tuple(1),
+            tuple(2),
+            tuple(3),
+            tuple(4),
+            tuple(5),
+            tuple(6),
+            tuple(7),
+            tuple(8),
+            tuple(9)
             );
 
         data.set("foo2",
-            tuple(0, 0),
-            tuple(1, 1),
-            tuple(2, 2),
-            tuple(3, 3),
-            tuple(4, 4),
-            tuple(5, 5),
-            tuple(6, 6),
-            tuple(7, 7),
-            tuple(8, 8),
-            tuple(9, 9)
+            tuple(0),
+            tuple(1),
+            tuple(2),
+            tuple(3),
+            tuple(4),
+            tuple(5),
+            tuple(6),
+            tuple(7),
+            tuple(8),
+            tuple(9)
             );
 
-        pigServer.registerQuery("A = LOAD 'foo1' USING mock.Storage() as (x:int, y:int);");
-        pigServer.registerQuery("B = LOAD 'foo2' USING mock.Storage() as (x:int, y:int);");
+        pigServer.registerQuery("A = LOAD 'foo1' USING mock.Storage() as (x:int);");
+        pigServer.registerQuery("B = LOAD 'foo2' USING mock.Storage() as (x:int);");
         if (preSort) {
             pigServer.registerQuery("A = ORDER A BY x ASC;");
             pigServer.registerQuery("B = ORDER B BY x ASC;");
@@ -638,24 +638,20 @@ public class TestSchemaTuple {
             if (!out.hasNext()) {
                 throw new Exception("Output should have had more elements! Failed on element: " + i);
             }
-            assertEquals(tuple(i, i, i, i), out.next());
+            assertEquals(tuple(i, i), out.next());
         }
         assertFalse(out.hasNext());
 
-        pigServer.registerQuery("STORE D INTO 'bar1' USING mock.Storage();");
-        pigServer.registerQuery("E = JOIN A by (x, y),  B by (x, y) using '"+joinType+"';");
-        pigServer.registerQuery("F = ORDER E BY $0 ASC;");
-        pigServer.registerQuery("STORE F INTO 'bar2' USING mock.Storage();");
+        pigServer.registerQuery("STORE D INTO 'bar' USING mock.Storage();");
 
-        List<Tuple> bar1 = data.get("bar1");
-        List<Tuple> bar2 = data.get("bar2");
+        List<Tuple> tuples = data.get("bar");
 
-        assertEquals("Output does not have enough elements! List: " + bar1, 10, bar1.size());
-        assertEquals("Output does not have enough elements! List: " + bar2, 10, bar2.size());
+        if (tuples.size() != 10) {
+            throw new Exception("Output does not have enough elements! List: " + tuples);
+        }
 
         for (int i = 0; i < 10; i++) {
-            assertEquals(tuple(i, i, i, i), bar1.get(i));
-            assertEquals(tuple(i, i, i, i), bar2.get(i));
+            assertEquals(tuple(i, i), tuples.get(i));
         }
 
     }
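
After this change the test joins two single-column relations, so each joined tuple carries two fields (A::x and B::x) instead of four. A hedged sketch of the mock.Storage round trip the test is built on, using the same static imports (resetData, tuple) as the test; the aliases 'foo1', 'foo2' and 'bar' are illustrative:

    // mock.Storage keeps data in memory: set() seeds input, get() reads output.
    Data data = resetData(pigServer);
    data.set("foo1", tuple(0), tuple(1));
    data.set("foo2", tuple(0), tuple(1));
    pigServer.registerQuery("A = LOAD 'foo1' USING mock.Storage() as (x:int);");
    pigServer.registerQuery("B = LOAD 'foo2' USING mock.Storage() as (x:int);");
    pigServer.registerQuery("C = JOIN A by x, B by x;");
    pigServer.registerQuery("STORE C INTO 'bar' USING mock.Storage();");
    List<Tuple> out = data.get("bar");
    assertEquals(tuple(0, 0), out.get(0));      // two fields per joined tuple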

Modified: pig/branches/spark/test/org/apache/pig/parser/TestQueryParser.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/parser/TestQueryParser.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/parser/TestQueryParser.java (original)
+++ pig/branches/spark/test/org/apache/pig/parser/TestQueryParser.java Fri Feb 24 03:34:37 2017
@@ -652,14 +652,4 @@ public class TestQueryParser {
     public void testSplit2() throws Exception {
         shouldPass("SPLIT logs INTO logins IF command == 'login', all_quits IF command == 'quit';");
     }
-
-    @Test
-    public void testBigDecimalParsing() throws Exception {
-        shouldPass("B = FILTER A BY $1 < 1234567890.123456789BD;");
-    }
-
-    @Test
-    public void testBigIntegerParsing() throws Exception {
-        shouldPass("B = FILTER A BY $1 < 1234567890123456789BI;");
-    }
 }

Modified: pig/branches/spark/test/org/apache/pig/parser/TestQueryParserUtils.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/parser/TestQueryParserUtils.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/parser/TestQueryParserUtils.java (original)
+++ pig/branches/spark/test/org/apache/pig/parser/TestQueryParserUtils.java Fri Feb 24 03:34:37 2017
@@ -19,20 +19,10 @@ package org.apache.pig.parser;
 
 import static org.junit.Assert.assertEquals;
 
-import java.io.IOException;
 import java.util.Properties;
 
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.pig.ExecType;
-import org.apache.pig.LoadFunc;
-import org.apache.pig.NonFSLoadFunc;
-import org.apache.pig.PigServer;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
-import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
-import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.test.Util;
 import org.junit.Test;
@@ -82,76 +72,43 @@ public class TestQueryParserUtils {
         QueryParserUtils.setHdfsServers("hello://nn1/tmp", pc);
         assertEquals(null, props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
 
-        // webhdfs
-        props.remove(MRConfiguration.JOB_HDFS_SERVERS);
-        QueryParserUtils.setHdfsServers("webhdfs://nn1/tmp", pc);
-        assertEquals("webhdfs://nn1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        QueryParserUtils.setHdfsServers("webhdfs://nn1:50070/tmp", pc);
-        assertEquals("webhdfs://nn1,webhdfs://nn1:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-
-        // har with webhfs
-        QueryParserUtils.setHdfsServers("har://webhdfs-nn1:50070/tmp", pc);
-        assertEquals("webhdfs://nn1,webhdfs://nn1:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        QueryParserUtils.setHdfsServers("har://webhdfs-nn2:50070/tmp", pc);
-        assertEquals("webhdfs://nn1,webhdfs://nn1:50070,webhdfs://nn2:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        props.remove(MRConfiguration.JOB_HDFS_SERVERS);
-        QueryParserUtils.setHdfsServers("har://webhdfs-nn1/tmp", pc);
-        assertEquals("webhdfs://nn1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-
-        //viewfs
-        props.remove(MRConfiguration.JOB_HDFS_SERVERS);
-        QueryParserUtils.setHdfsServers("viewfs:/tmp", pc);
-        assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        QueryParserUtils.setHdfsServers("viewfs:///tmp", pc);
-        assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        QueryParserUtils.setHdfsServers("viewfs://cluster1/tmp", pc);
-        assertEquals("viewfs://,viewfs://cluster1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-
-        //har with viewfs
-        props.remove(MRConfiguration.JOB_HDFS_SERVERS);
-        QueryParserUtils.setHdfsServers("har://viewfs/tmp", pc);
-        assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-        QueryParserUtils.setHdfsServers("har://viewfs-cluster1/tmp", pc);
-        assertEquals("viewfs://,viewfs://cluster1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
-
-    }
-
+        if(org.apache.pig.impl.util.Utils.isHadoop23() || org.apache.pig.impl.util.Utils.isHadoop2()) {
+            // webhdfs
+            props.remove(MRConfiguration.JOB_HDFS_SERVERS);
+            QueryParserUtils.setHdfsServers("webhdfs://nn1/tmp", pc);
+            assertEquals("webhdfs://nn1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            QueryParserUtils.setHdfsServers("webhdfs://nn1:50070/tmp", pc);
+            assertEquals("webhdfs://nn1,webhdfs://nn1:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+
+            // har with webhdfs
+            QueryParserUtils.setHdfsServers("har://webhdfs-nn1:50070/tmp", pc);
+            assertEquals("webhdfs://nn1,webhdfs://nn1:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            QueryParserUtils.setHdfsServers("har://webhdfs-nn2:50070/tmp", pc);
+            assertEquals("webhdfs://nn1,webhdfs://nn1:50070,webhdfs://nn2:50070", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            props.remove(MRConfiguration.JOB_HDFS_SERVERS);
+            QueryParserUtils.setHdfsServers("har://webhdfs-nn1/tmp", pc);
+            assertEquals("webhdfs://nn1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+
+            //viewfs
+            props.remove(MRConfiguration.JOB_HDFS_SERVERS);
+            QueryParserUtils.setHdfsServers("viewfs:/tmp", pc);
+            assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            QueryParserUtils.setHdfsServers("viewfs:///tmp", pc);
+            assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            QueryParserUtils.setHdfsServers("viewfs://cluster1/tmp", pc);
+            assertEquals("viewfs://,viewfs://cluster1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+
+            //har with viewfs
+            props.remove(MRConfiguration.JOB_HDFS_SERVERS);
+            QueryParserUtils.setHdfsServers("har://viewfs/tmp", pc);
+            assertEquals("viewfs://", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
+            QueryParserUtils.setHdfsServers("har://viewfs-cluster1/tmp", pc);
+            assertEquals("viewfs://,viewfs://cluster1", props.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
 
-    @Test
-    public void testNonFSLoadFunc() throws Exception {
-        PigServer pigServer = new PigServer(Util.getLocalTestMode(), new Properties());
-        pigServer.registerQuery("A =  load 'hbase://query/SELECT ID, NAME, DATE FROM HIRES WHERE DATE > TO_DATE(\"1990-12-21 05:55:00.000\")' using org.apache.pig.parser.TestQueryParserUtils$DummyNonFSLoader();");
-        pigServer.shutdown();
-    }
 
-    /**
-     * Test class for testNonFSLoadFuncNoSetHdfsServersCall test case
-     */
-    public static class DummyNonFSLoader extends LoadFunc implements NonFSLoadFunc {
-
-        @Override
-        public void setLocation(String location, Job job) throws IOException {
-            throw new RuntimeException("Should not be called");
         }
 
-        @Override
-        public InputFormat getInputFormat() throws IOException {
-            throw new RuntimeException("Should not be called");
-        }
-
-        @Override
-        public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
-            throw new RuntimeException("Should not be called");
-        }
 
-        @Override
-        public Tuple getNext() throws IOException {
-            throw new RuntimeException("Should not be called");
-        }
-
-        @Override
-        public String relativeToAbsolutePath(String location, Path curDir) throws IOException {
-            return location;
-        }
     }
+
 }
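
The webhdfs and viewfs assertions are now guarded because those filesystems are only available on Hadoop 0.23/2.x. The mechanism under test is unchanged: setHdfsServers() appends each newly seen remote filesystem to a comma-separated list in MRConfiguration.JOB_HDFS_SERVERS. A short worked example, with values taken from the assertions above:

    // Starting from an unset property:
    props.remove(MRConfiguration.JOB_HDFS_SERVERS);
    QueryParserUtils.setHdfsServers("webhdfs://nn1/tmp", pc);
    // property is now "webhdfs://nn1"
    QueryParserUtils.setHdfsServers("webhdfs://nn1:50070/tmp", pc);
    // property is now "webhdfs://nn1,webhdfs://nn1:50070" (each new server is
    // appended; repeats are not re-added, as the har assertions above show)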

Modified: pig/branches/spark/test/org/apache/pig/test/TestBZip.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestBZip.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestBZip.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestBZip.java Fri Feb 24 03:34:37 2017
@@ -43,6 +43,7 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
+import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
 import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.PigContext;
@@ -66,10 +67,16 @@ public class TestBZip {
 
     @Parameters(name = "pig.bzip.use.hadoop.inputformat = {0}.")
     public static Iterable<Object[]> data() {
-        return Arrays.asList(new Object[][] {
-            { false  },
-            { true   }
-        });
+        if (HadoopShims.isHadoopYARN()) {
+            return Arrays.asList(new Object[][] {
+                { false  },
+                { true   }
+            });
+        } else {
+            return Arrays.asList(new Object[][] {
+                { false }
+            });
+        }
     }
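
The gating above happens in JUnit's @Parameters factory: each Object[] it returns becomes one constructor invocation, so on YARN the whole class runs twice (once per value of pig.bzip.use.hadoop.inputformat) and on older Hadoop only the false case runs. A sketch of the wiring, with the class skeleton reduced to the parameterized parts:

    @RunWith(Parameterized.class)
    public class TestBZip {
        private final boolean useBzipFromHadoop;

        @Parameters(name = "pig.bzip.use.hadoop.inputformat = {0}.")
        public static Iterable<Object[]> data() {
            return HadoopShims.isHadoopYARN()
                    ? Arrays.asList(new Object[][] { { false }, { true } })
                    : Arrays.asList(new Object[][] { { false } });
        }

        public TestBZip(Boolean useBzipFromHadoop) {
            this.useBzipFromHadoop = useBzipFromHadoop;
        }
    }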
 
     public TestBZip (Boolean useBzipFromHadoop) {

Modified: pig/branches/spark/test/org/apache/pig/test/TestBuiltin.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/test/org/apache/pig/test/TestBuiltin.java?rev=1784224&r1=1784223&r2=1784224&view=diff
==============================================================================
--- pig/branches/spark/test/org/apache/pig/test/TestBuiltin.java (original)
+++ pig/branches/spark/test/org/apache/pig/test/TestBuiltin.java Fri Feb 24 03:34:37 2017
@@ -130,7 +130,6 @@ import org.apache.pig.data.DefaultBagFac
 import org.apache.pig.data.Tuple;
 import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.PigContext;
-import org.apache.pig.impl.io.FileLocalizer;
 import org.apache.pig.impl.io.ReadToEndLoader;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
@@ -3207,31 +3206,29 @@ public class TestBuiltin {
     @Test
     public void testUniqueID() throws Exception {
         Util.resetStateForExecModeSwitch();
+        String inputFileName = "testUniqueID.txt";
+        Util.createInputFile(cluster, inputFileName, new String[]
+            {"1\n2\n3\n4\n5\n1\n2\n3\n4\n5\n"});
         Properties copyproperties = new Properties();
         copyproperties.putAll(cluster.getProperties());
         PigServer pigServer = new PigServer(cluster.getExecType(), copyproperties);
-
-        // running with 2 mappers each taking 5 records
-        String TMP_DIR = FileLocalizer.getTemporaryPath(pigServer.getPigContext()).toUri().getPath();
-        Util.createInputFile(cluster, TMP_DIR + "/input1.txt", new String[] {"1\n2\n3\n4\n5"});
-        Util.createInputFile(cluster, TMP_DIR + "/input2.txt", new String[] {"1\n2\n3\n4\n5"});
+        pigServer.getPigContext().getProperties().setProperty("mapred.max.split.size", "10");
         pigServer.getPigContext().getProperties().setProperty("pig.noSplitCombination", "true");
-
-        pigServer.registerQuery("A = load '" + TMP_DIR + "' as (name);");
+        pigServer.registerQuery("A = load '" + inputFileName + "' as (name);");
         pigServer.registerQuery("B = foreach A generate name, UniqueID();");
         Iterator<Tuple> iter = pigServer.openIterator("B");
         if (!Util.isSparkExecType(cluster.getExecType())) {
-            assertEquals("0-0", iter.next().get(1));
-            assertEquals("0-1", iter.next().get(1));
-            assertEquals("0-2", iter.next().get(1));
-            assertEquals("0-3", iter.next().get(1));
-            assertEquals("0-4", iter.next().get(1));
-            assertEquals("1-0", iter.next().get(1));
-            assertEquals("1-1", iter.next().get(1));
-            assertEquals("1-2", iter.next().get(1));
-            assertEquals("1-3", iter.next().get(1));
-            assertEquals("1-4", iter.next().get(1));
-        } else {
+            assertEquals(iter.next().get(1), "0-0");
+            assertEquals(iter.next().get(1), "0-1");
+            assertEquals(iter.next().get(1), "0-2");
+            assertEquals(iter.next().get(1), "0-3");
+            assertEquals(iter.next().get(1), "0-4");
+            assertEquals(iter.next().get(1), "1-0");
+            assertEquals(iter.next().get(1), "1-1");
+            assertEquals(iter.next().get(1), "1-2");
+            assertEquals(iter.next().get(1), "1-3");
+            assertEquals(iter.next().get(1), "1-4");
+        } else {
             //there will be 2 InputSplits when mapred.max.split.size is 10 (bytes) for testUniqueID.txt (20 bytes)
             //Split0:
             //            1\n
@@ -3247,35 +3244,34 @@ public class TestBuiltin {
             //            5\n
             //The size of Split0 is 12, not 10, because LineRecordReader#nextKeyValue reads one more line
             //For more detail, see PIG-4383
-            assertEquals("0-0", iter.next().get(1));
-            assertEquals("0-1", iter.next().get(1));
-            assertEquals("0-2", iter.next().get(1));
-            assertEquals("0-3", iter.next().get(1));
-            assertEquals("0-4", iter.next().get(1));
-            assertEquals("0-5", iter.next().get(1));
-            assertEquals("1-0", iter.next().get(1));
-            assertEquals("1-1", iter.next().get(1));
-            assertEquals("1-2", iter.next().get(1));
-            assertEquals("1-3", iter.next().get(1));
+            assertEquals(iter.next().get(1), "0-0");
+            assertEquals(iter.next().get(1), "0-1");
+            assertEquals(iter.next().get(1), "0-2");
+            assertEquals(iter.next().get(1), "0-3");
+            assertEquals(iter.next().get(1), "0-4");
+            assertEquals(iter.next().get(1), "0-5");
+            assertEquals(iter.next().get(1), "1-0");
+            assertEquals(iter.next().get(1), "1-1");
+            assertEquals(iter.next().get(1), "1-2");
+            assertEquals(iter.next().get(1), "1-3");
         }
-        Util.deleteFile(cluster, TMP_DIR + "/input1.txt");
-        Util.deleteFile(cluster, TMP_DIR + "/input2.txt");
+        Util.deleteFile(cluster, inputFileName);
     }
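
As a quick worked check of the expected IDs (sizes taken from the test data; the arithmetic is a sketch):

    // The input "1\n2\n3\n4\n5\n" written twice is 20 bytes; with
    // mapred.max.split.size=10 that is ceil(20 / 10) = 2 input splits.
    long fileLen = 20, maxSplitSize = 10;
    long splits = (fileLen + maxSplitSize - 1) / maxSplitSize;   // 2
    // LineRecordReader finishes the line that crosses the first split
    // boundary, so split 0 emits 0-0 .. 0-5 and split 1 emits 1-0 .. 1-3
    // (six and four records; see PIG-4383).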
 
     @Test
     public void testRANDOMWithJob() throws Exception {
         Util.resetStateForExecModeSwitch();
+        String inputFileName = "testRANDOM.txt";
+        Util.createInputFile(cluster, inputFileName, new String[]
+            {"1\n2\n3\n4\n5\n1\n2\n3\n4\n5\n"});
+
         Properties copyproperties = new Properties();
         copyproperties.putAll(cluster.getProperties());
         PigServer pigServer = new PigServer(cluster.getExecType(), copyproperties);
-
-        // running with 2 mappers each taking 5 records
-        String TMP_DIR = FileLocalizer.getTemporaryPath(pigServer.getPigContext()).toUri().getPath();
-        Util.createInputFile(cluster, TMP_DIR + "/input1.txt", new String[] {"1\n2\n3\n4\n5"});
-        Util.createInputFile(cluster, TMP_DIR + "/input2.txt", new String[] {"1\n2\n3\n4\n5"});
+        // running with two mappers
+        pigServer.getPigContext().getProperties().setProperty("mapred.max.split.size", "10");
         pigServer.getPigContext().getProperties().setProperty("pig.noSplitCombination", "true");
-
-        pigServer.registerQuery("A = load '" + TMP_DIR + "' as (name);");
+        pigServer.registerQuery("A = load '" + inputFileName + "' as (name);");
         pigServer.registerQuery("B = foreach A generate name, RANDOM();");
         Iterator<Tuple> iter = pigServer.openIterator("B");
         double [] mapper1 = new double[5];
@@ -3298,8 +3294,7 @@ public class TestBuiltin {
         for( int i = 0; i < 5; i++ ){
             assertNotEquals(mapper1[i], mapper2[i], 0.0001);
         }
-        Util.deleteFile(cluster, TMP_DIR + "/input1.txt");
-        Util.deleteFile(cluster, TMP_DIR + "/input2.txt");
+        Util.deleteFile(cluster, inputFileName);
     }
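
Both rewritten tests force the two-mapper layout the same way; the two knobs, exactly as set in the hunks above:

    // One mapper per ~10-byte split, and no recombining of small splits.
    pigServer.getPigContext().getProperties().setProperty("mapred.max.split.size", "10");
    pigServer.getPigContext().getProperties().setProperty("pig.noSplitCombination", "true");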