You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by daijy@apache.org on 2012/03/15 00:18:11 UTC
svn commit: r1300781 - in /pig/branches/branch-0.9: CHANGES.txt
test/e2e/pig/deployers/ExistingClusterDeployer.pm
Author: daijy
Date: Wed Mar 14 23:18:11 2012
New Revision: 1300781
URL: http://svn.apache.org/viewvc?rev=1300781&view=rev
Log:
PIG-2588: e2e harness: use pig command for cluster deploy
Modified:
pig/branches/branch-0.9/CHANGES.txt
pig/branches/branch-0.9/test/e2e/pig/deployers/ExistingClusterDeployer.pm
Modified: pig/branches/branch-0.9/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.9/CHANGES.txt?rev=1300781&r1=1300780&r2=1300781&view=diff
==============================================================================
--- pig/branches/branch-0.9/CHANGES.txt (original)
+++ pig/branches/branch-0.9/CHANGES.txt Wed Mar 14 23:18:11 2012
@@ -22,6 +22,8 @@ Release 0.9.3 - Unreleased
BUG FIXES
+PIG-2588: e2e harness: use pig command for cluster deploy (thw via daijy)
+
PIG-2576: Change in behavior for UDFContext.getUDFContext().getJobConf() in front-end (thw via daijy)
PIG-2572: e2e harness deploy fails when using pig that does not bundle hadoop (thw via daijy)
Modified: pig/branches/branch-0.9/test/e2e/pig/deployers/ExistingClusterDeployer.pm
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.9/test/e2e/pig/deployers/ExistingClusterDeployer.pm?rev=1300781&r1=1300780&r2=1300781&view=diff
==============================================================================
--- pig/branches/branch-0.9/test/e2e/pig/deployers/ExistingClusterDeployer.pm (original)
+++ pig/branches/branch-0.9/test/e2e/pig/deployers/ExistingClusterDeployer.pm Wed Mar 14 23:18:11 2012
@@ -80,7 +80,7 @@ sub checkPrerequisites
}
# Run a quick and easy Hadoop command to make sure we can
- $self->runHadoopCmd($cfg, $log, "fs -ls /");
+ $self->runPigCmd($cfg, $log, "fs -ls /");
}
@@ -217,7 +217,7 @@ sub generateData
);
# Create the HDFS directories
- $self->runHadoopCmd($cfg, $log, "fs -mkdir $cfg->{'inpathbase'}");
+ $self->runPigCmd($cfg, $log, "fs -mkdir $cfg->{'inpathbase'}");
foreach my $table (@tables) {
print "Generating data for $table->{'name'}\n";
@@ -227,9 +227,9 @@ sub generateData
$self->runCmd($log, \@cmd);
# Copy the data to HDFS
- my $hadoop = "fs -copyFromLocal $table->{'name'} ".
+ my $hadoop = "copyFromLocal $table->{'name'} ".
"$cfg->{'inpathbase'}/$table->{'hdfs'}";
- $self->runHadoopCmd($cfg, $log, $hadoop);
+ $self->runPigCmd($cfg, $log, $hadoop);
}
}
@@ -323,17 +323,18 @@ sub confirmUndeployment
# it can use the existing utilities to build Pig commands and switch
# naturally to local mode with everything else.
-sub runHadoopCmd($$$$)
+sub runPigCmd($$$$)
{
my ($self, $cfg, $log, $c) = @_;
+ my @pigCmd = ("$cfg->{'pigpath'}/bin/pig");
+ push(@pigCmd, '-e');
+ push(@pigCmd, split(' ', $c));
+
# set the PIG_CLASSPATH environment variable
- $ENV{'HADOOP_CLASSPATH'} = "$cfg->{'hadoopconfdir'}";
+ $ENV{'PIG_CLASSPATH'} = "$cfg->{'hadoopconfdir'}";
- my @cmd = ("$cfg->{'hadoopbin'}");
- push(@cmd, split(' ', $c));
-
- $self->runCmd($log, \@cmd);
+ $self->runCmd($log, \@pigCmd);
}
sub runCmd($$$)