You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ol...@apache.org on 2009/08/05 01:40:08 UTC
svn commit: r801025 - in /hadoop/pig/branches/branch-0.3: ./ bin/
src/org/apache/pig/backend/hadoop/datastorage/
src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/
src/org/apache/pig/impl/io/ src/org/apache/pig/tools/pigstats/ test/org/a...
Author: olga
Date: Tue Aug 4 23:40:08 2009
New Revision: 801025
URL: http://svn.apache.org/viewvc?rev=801025&view=rev
Log:
reverted unintentional commit
Modified:
hadoop/pig/branches/branch-0.3/bin/pig
hadoop/pig/branches/branch-0.3/build.xml
hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java
hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java
hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
hadoop/pig/branches/branch-0.3/src/org/apache/pig/impl/io/NullableBytesWritable.java
hadoop/pig/branches/branch-0.3/src/org/apache/pig/tools/pigstats/PigStats.java
hadoop/pig/branches/branch-0.3/test/org/apache/pig/test/MiniCluster.java
Modified: hadoop/pig/branches/branch-0.3/bin/pig
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/bin/pig?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/bin/pig (original)
+++ hadoop/pig/branches/branch-0.3/bin/pig Tue Aug 4 23:40:08 2009
@@ -34,7 +34,7 @@
#
# PIG_ROOT_LOGGER The root appender. Default is INFO,console
#
-# PIG_HADOOP_VERSION Version of hadoop to run with. Default is 18 (0.18).
+# PIG_HADOOP_VERSION Version of hadoop to run with. Default is 20 (0.20).
cygwin=false
case "`uname`" in
@@ -136,7 +136,7 @@
done
# Set the version for Hadoop, default to 17
-PIG_HADOOP_VERSION="${PIG_HADOOP_VERSION:-18}"
+PIG_HADOOP_VERSION="${PIG_HADOOP_VERSION:-20}"
# add libs to CLASSPATH. There can be more than one version of the hadoop
# libraries in the lib dir, so don't blindly add them all. Only add the one
# that matches PIG_HADOOP_VERSION.
Modified: hadoop/pig/branches/branch-0.3/build.xml
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/build.xml?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/build.xml (original)
+++ hadoop/pig/branches/branch-0.3/build.xml Tue Aug 4 23:40:08 2009
@@ -47,14 +47,14 @@
<!-- property name="build.encoding" value="ISO-8859-1" / -->
<property name="build.encoding" value="UTF8" />
<!-- TODO with only one version of hadoop in the lib folder we do not need that anymore -->
- <property name="hadoop.jarfile" value="hadoop18.jar" />
+ <property name="hadoop.jarfile" value="hadoop20.jar" />
<property name="hbase.jarfile" value="hbase-0.18.1.jar" />
<property name="hbase.test.jarfile" value="hbase-0.18.1-test.jar" />
<!-- javac properties -->
<property name="javac.debug" value="on" />
<property name="javac.optimize" value="on" />
- <property name="javac.deprecation" value="on" />
+ <property name="javac.deprecation" value="off" />
<property name="javac.version" value="1.5" />
<property name="javac.args" value="" />
<!-- default warnings option -->
@@ -446,6 +446,7 @@
<!-- Excluded under Windows.-->
<exclude name="**/TestHBaseStorage.java" if="isWindows" />
<!-- Excluded because we don't want to run them -->
+ <exclude name="**/TestHBaseStorage.java" />
<exclude name="**/PigExecTestCase.java" />
<exclude name="**/TypeCheckingTestUtil.java" />
<exclude name="**/TypeGraphPrinter.java" />
Modified: hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java (original)
+++ hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java Tue Aug 4 23:40:08 2009
@@ -23,7 +23,6 @@
import java.util.Enumeration;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
import java.util.Properties;
Modified: hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java (original)
+++ hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java Tue Aug 4 23:40:08 2009
@@ -30,7 +30,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.dfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.pig.PigException;
import org.apache.pig.backend.datastorage.ContainerDescriptor;
Modified: hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java (original)
+++ hadoop/pig/branches/branch-0.3/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Tue Aug 4 23:40:08 2009
@@ -30,6 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapred.JobConf;
import org.apache.pig.FuncSpec;
import org.apache.pig.PigException;
import org.apache.pig.PigWarning;
@@ -1323,10 +1324,10 @@
ExecutionEngine eng = pigContext.getExecutionEngine();
if(eng instanceof HExecutionEngine){
try {
- val = Math.round(0.9f * ((HExecutionEngine)eng).getJobClient().getDefaultReduces());
+ val = ((JobConf)((HExecutionEngine)eng).getJobClient().getConf()).getNumReduceTasks();
if(val<=0)
val = 1;
- } catch (IOException e) {
+ } catch (Exception e) {
int errCode = 6015;
String msg = "Problem getting the default number of reduces from the Job Client.";
throw new MRCompilerException(msg, errCode, PigException.REMOTE_ENVIRONMENT, e);
Modified: hadoop/pig/branches/branch-0.3/src/org/apache/pig/impl/io/NullableBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/src/org/apache/pig/impl/io/NullableBytesWritable.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/src/org/apache/pig/impl/io/NullableBytesWritable.java (original)
+++ hadoop/pig/branches/branch-0.3/src/org/apache/pig/impl/io/NullableBytesWritable.java Tue Aug 4 23:40:08 2009
@@ -39,6 +39,6 @@
public Object getValueAsPigType() {
BytesWritable bw = (BytesWritable)mValue;
- return isNull() ? null : new DataByteArray(bw.get(), 0, bw.getSize());
+ return isNull() ? null : new DataByteArray(bw.getBytes(), 0, bw.getLength());
}
}
Modified: hadoop/pig/branches/branch-0.3/src/org/apache/pig/tools/pigstats/PigStats.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/src/org/apache/pig/tools/pigstats/PigStats.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/src/org/apache/pig/tools/pigstats/PigStats.java (original)
+++ hadoop/pig/branches/branch-0.3/src/org/apache/pig/tools/pigstats/PigStats.java Tue Aug 4 23:40:08 2009
@@ -148,13 +148,14 @@
try {
counters = rj.getCounters();
Counters.Group taskgroup = counters.getGroup("org.apache.hadoop.mapred.Task$Counter");
- Counters.Group hdfsgroup = counters.getGroup("org.apache.hadoop.mapred.Task$FileSystemCounter");
+ Counters.Group hdfsgroup = counters.getGroup("FileSystemCounters");
jobStats.put("PIG_STATS_MAP_INPUT_RECORDS", (Long.valueOf(taskgroup.getCounterForName("MAP_INPUT_RECORDS").getCounter())).toString());
jobStats.put("PIG_STATS_MAP_OUTPUT_RECORDS", (Long.valueOf(taskgroup.getCounterForName("MAP_OUTPUT_RECORDS").getCounter())).toString());
jobStats.put("PIG_STATS_REDUCE_INPUT_RECORDS", (Long.valueOf(taskgroup.getCounterForName("REDUCE_INPUT_RECORDS").getCounter())).toString());
jobStats.put("PIG_STATS_REDUCE_OUTPUT_RECORDS", (Long.valueOf(taskgroup.getCounterForName("REDUCE_OUTPUT_RECORDS").getCounter())).toString());
- jobStats.put("PIG_STATS_BYTES_WRITTEN", (Long.valueOf(hdfsgroup.getCounterForName("HDFS_WRITE").getCounter())).toString());
+ jobStats.put("PIG_STATS_BYTES_WRITTEN", (Long.valueOf(hdfsgroup.getCounterForName("HDFS_BYTES_WRITTEN").getCounter())).toString());
+
} catch (IOException e) {
// TODO Auto-generated catch block
String error = "Unable to get the counters.";
Modified: hadoop/pig/branches/branch-0.3/test/org/apache/pig/test/MiniCluster.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.3/test/org/apache/pig/test/MiniCluster.java?rev=801025&r1=801024&r2=801025&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.3/test/org/apache/pig/test/MiniCluster.java (original)
+++ hadoop/pig/branches/branch-0.3/test/org/apache/pig/test/MiniCluster.java Tue Aug 4 23:40:08 2009
@@ -20,7 +20,7 @@
import java.io.*;
import java.util.Properties;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.dfs.MiniDFSCluster;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.conf.Configuration;
@@ -68,7 +68,7 @@
m_conf.setInt("mapred.submit.replication", 2);
m_conf.set("dfs.datanode.address", "0.0.0.0:0");
m_conf.set("dfs.datanode.http.address", "0.0.0.0:0");
- m_conf.writeXml(new FileOutputStream(conf_file));
+ m_conf.write(new FileOutputStream(conf_file));
// Set the system properties needed by Pig
System.setProperty("cluster", m_conf.get("mapred.job.tracker"));