You are viewing a plain text version of this content; the canonical (HTML) version is available at the original archive location.
Posted to commits@pig.apache.org by da...@apache.org on 2012/03/15 00:19:32 UTC

svn commit: r1300782 - in /pig/branches/branch-0.10: CHANGES.txt test/e2e/pig/deployers/ExistingClusterDeployer.pm

Author: daijy
Date: Wed Mar 14 23:19:31 2012
New Revision: 1300782

URL: http://svn.apache.org/viewvc?rev=1300782&view=rev
Log:
PIG-2588: e2e harness: use pig command for cluster deploy

Modified:
    pig/branches/branch-0.10/CHANGES.txt
    pig/branches/branch-0.10/test/e2e/pig/deployers/ExistingClusterDeployer.pm

Modified: pig/branches/branch-0.10/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.10/CHANGES.txt?rev=1300782&r1=1300781&r2=1300782&view=diff
==============================================================================
--- pig/branches/branch-0.10/CHANGES.txt (original)
+++ pig/branches/branch-0.10/CHANGES.txt Wed Mar 14 23:19:31 2012
@@ -170,6 +170,8 @@ PIG-2228: support partial aggregation in
 
 BUG FIXES
 
+PIG-2588: e2e harness: use pig command for cluster deploy (thw via daijy)
+
 PIG-2565: Support IMPORT for macros stored in S3 Buckets (daijy)
 
 PIG-2570: LimitOptimizer fails with dynamic LIMIT argument (daijy)

Modified: pig/branches/branch-0.10/test/e2e/pig/deployers/ExistingClusterDeployer.pm
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.10/test/e2e/pig/deployers/ExistingClusterDeployer.pm?rev=1300782&r1=1300781&r2=1300782&view=diff
==============================================================================
--- pig/branches/branch-0.10/test/e2e/pig/deployers/ExistingClusterDeployer.pm (original)
+++ pig/branches/branch-0.10/test/e2e/pig/deployers/ExistingClusterDeployer.pm Wed Mar 14 23:19:31 2012
@@ -80,7 +80,7 @@ sub checkPrerequisites
     }
 
     # Run a quick and easy Hadoop command to make sure we can
-    $self->runHadoopCmd($cfg, $log, "fs -ls /");
+    $self->runPigCmd($cfg, $log, "fs -ls /");
 
 }
 
@@ -217,7 +217,7 @@ sub generateData
     );
 
 	# Create the HDFS directories
-	$self->runHadoopCmd($cfg, $log, "fs -mkdir $cfg->{'inpathbase'}");
+	$self->runPigCmd($cfg, $log, "fs -mkdir $cfg->{'inpathbase'}");
 
     foreach my $table (@tables) {
 		print "Generating data for $table->{'name'}\n";
@@ -227,9 +227,9 @@ sub generateData
 		$self->runCmd($log, \@cmd);
 
 		# Copy the data to HDFS
-		my $hadoop = "fs -copyFromLocal $table->{'name'} ".
+		my $hadoop = "copyFromLocal $table->{'name'} ".
 			"$cfg->{'inpathbase'}/$table->{'hdfs'}";
-		$self->runHadoopCmd($cfg, $log, $hadoop);
+		$self->runPigCmd($cfg, $log, $hadoop);
 
     }
 }
@@ -323,17 +323,18 @@ sub confirmUndeployment
 # it can use the existing utilities to build Pig commands and switch
 # naturally to local mode with everything else.
 
-sub runHadoopCmd($$$$)
+sub runPigCmd($$$$)
 {
     my ($self, $cfg, $log, $c) = @_;
 
+    my @pigCmd = ("$cfg->{'pigpath'}/bin/pig");
+    push(@pigCmd, '-e');
+    push(@pigCmd, split(' ', $c));
+
     # set the PIG_CLASSPATH environment variable
-    $ENV{'HADOOP_CLASSPATH'} = "$cfg->{'hadoopconfdir'}";
+    $ENV{'PIG_CLASSPATH'} = "$cfg->{'hadoopconfdir'}";
                           
-    my @cmd = ("$cfg->{'hadoopbin'}");
-    push(@cmd, split(' ', $c));
-
-    $self->runCmd($log, \@cmd);
+    $self->runCmd($log, \@pigCmd);
 }
 
 sub runCmd($$$)