You are viewing a plain text version of this content. The canonical (HTML) version, including the original hyperlinks, is available in the mailing-list archive.
Posted to commits@pig.apache.org by da...@apache.org on 2013/03/18 21:57:00 UTC
svn commit: r1457991 - in /pig/trunk: CHANGES.txt ivy.xml
ivy/libraries.properties
shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
test/org/apache/pig/test/TestParser.java
test/org/apache/pig/test/TestPigSplit.java
Author: daijy
Date: Mon Mar 18 20:57:00 2013
New Revision: 1457991
URL: http://svn.apache.org/r1457991
Log:
PIG-3248: Upgrade hadoop-2.0.0-alpha to hadoop-2.0.3-alpha
Modified:
pig/trunk/CHANGES.txt
pig/trunk/ivy.xml
pig/trunk/ivy/libraries.properties
pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
pig/trunk/test/org/apache/pig/test/TestParser.java
pig/trunk/test/org/apache/pig/test/TestPigSplit.java
Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Mon Mar 18 20:57:00 2013
@@ -28,6 +28,8 @@ PIG-3174: Remove rpm and deb artifacts
IMPROVEMENTS
+PIG-3248: Upgrade hadoop-2.0.0-alpha to hadoop-2.0.3-alpha (daijy)
+
PIG-3235: Add log4j.properties for unit tests (cheolsoo)
PIG-3236: parametrize snapshot and staging repo id (gkesavan via daijy)
Modified: pig/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/pig/trunk/ivy.xml?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/ivy.xml (original)
+++ pig/trunk/ivy.xml Mon Mar 18 20:57:00 2013
@@ -160,6 +160,8 @@
rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
<dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager"
rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+ <dependency org="org.apache.hadoop" name="hadoop-yarn-client"
+ rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
<dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
<dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}"
Modified: pig/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/pig/trunk/ivy/libraries.properties?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/ivy/libraries.properties (original)
+++ pig/trunk/ivy/libraries.properties Mon Mar 18 20:57:00 2013
@@ -38,9 +38,9 @@ guava.version=11.0
jersey-core.version=1.8
hadoop-core.version=1.0.0
hadoop-test.version=1.0.0
-hadoop-common.version=2.0.0-alpha
-hadoop-hdfs.version=2.0.0-alpha
-hadoop-mapreduce.version=2.0.0-alpha
+hadoop-common.version=2.0.3-alpha
+hadoop-hdfs.version=2.0.3-alpha
+hadoop-mapreduce.version=2.0.3-alpha
hbase.version=0.94.1
hsqldb.version=1.8.0.10
hive.version=0.8.0
Modified: pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java (original)
+++ pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java Mon Mar 18 20:57:00 2013
@@ -69,6 +69,8 @@ public class MiniCluster extends MiniGen
// Builds and starts the mini dfs and mapreduce clusters
Configuration config = new Configuration();
+ config.set("yarn.scheduler.capacity.root.queues", "default");
+ config.set("yarn.scheduler.capacity.root.default.capacity", "100");
m_dfs = new MiniDFSCluster(config, dataNodes, true, null);
m_fileSys = m_dfs.getFileSystem();
m_dfs_conf = m_dfs.getConfiguration(0);
Modified: pig/trunk/test/org/apache/pig/test/TestParser.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestParser.java?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestParser.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestParser.java Mon Mar 18 20:57:00 2013
@@ -79,38 +79,44 @@ public class TestParser {
pigServer.openIterator("vals");
}
- @Test(expected = IOException.class)
+ @Test
public void testRemoteServerList() throws ExecException, IOException {
Properties pigProperties = pigServer.getPigContext().getProperties();
pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
Configuration conf;
+
+ Data data = Storage.resetData(pigServer.getPigContext());
+ data.set("/user/pig/1.txt");// no data
- pigServer.registerQuery("a = load '/user/pig/1.txt';");
+ pigServer.registerQuery("a = load '/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") == null ||
- conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
- pigServer.registerQuery("a = load 'hdfs://a.com/user/pig/1.txt';");
+ pigServer.registerQuery("a = load 'hdfs://a.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers") == null ||
- conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
- pigServer.registerQuery("a = load 'har:///1.txt';");
+ pigServer.registerQuery("a = load 'har:///1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers") == null ||
- conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
- pigServer.registerQuery("a = load 'hdfs://b.com/user/pig/1.txt';");
+ pigServer.registerQuery("a = load 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://b.com"));
- pigServer.registerQuery("a = load 'har://hdfs-c.com/user/pig/1.txt';");
+ pigServer.registerQuery("a = load 'har://hdfs-c.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://c.com"));
- pigServer.registerQuery("a = load 'hdfs://d.com:8020/user/pig/1.txt';");
+ pigServer.registerQuery("a = load 'hdfs://d.com:8020/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://d.com:8020"));
@@ -135,23 +141,24 @@ public class TestParser {
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") == null ||
- conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
+ conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
- pigServer.registerQuery("store a into 'hdfs://b.com/user/pig/1.txt';");
+ pigServer.registerQuery("store a into 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://b.com"));
- pigServer.registerQuery("store a into 'har://hdfs-c.com:8020/user/pig/1.txt';");
+ pigServer.registerQuery("store a into 'har://hdfs-c.com:8020/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://c.com:8020"));
- pigServer.registerQuery("store a into 'hdfs://d.com:8020/user/pig/1.txt';");
+ pigServer.registerQuery("store a into 'hdfs://d.com:8020/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
Modified: pig/trunk/test/org/apache/pig/test/TestPigSplit.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestPigSplit.java?rev=1457991&r1=1457990&r2=1457991&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestPigSplit.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestPigSplit.java Mon Mar 18 20:57:00 2013
@@ -108,7 +108,7 @@ public class TestPigSplit {
createInput(new String[] { "0\ta" });
pigServer.registerQuery("a = load '" + inputFileName + "';");
- for (int i = 0; i < 500; i++) {
+ for (int i = 0; i < 200; i++) {
pigServer.registerQuery("a = filter a by $0 == '1';");
}
Iterator<Tuple> iter = pigServer.openIterator("a");
@@ -158,4 +158,4 @@ public class TestPigSplit {
Iterator<Tuple> iter = pigServer.openIterator("a");
assertFalse(iter.hasNext());
}
-}
\ No newline at end of file
+}