Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2013/04/01 18:47:42 UTC
svn commit: r1463203 [4/4] - in
/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project: ./ bin/ conf/
hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop...
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java Mon Apr 1 16:47:16 2013
@@ -18,24 +18,37 @@
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
+import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
+import org.apache.hadoop.mapred.JobClient.NetworkedJob;
+import org.apache.hadoop.mapred.JobClient.TaskStatusFilter;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.yarn.YarnException;
import org.junit.Test;
-
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
public class TestNetworkedJob {
private static String TEST_ROOT_DIR = new File(System.getProperty(
@@ -44,8 +57,7 @@ public class TestNetworkedJob {
private static Path inFile = new Path(testDir, "in");
private static Path outDir = new Path(testDir, "out");
- @SuppressWarnings("deprecation")
- @Test
+ @Test (timeout=5000)
public void testGetNullCounters() throws Exception {
//mock creation
Job mockJob = mock(Job.class);
@@ -57,7 +69,7 @@ public class TestNetworkedJob {
verify(mockJob).getCounters();
}
- @Test
+ @Test (timeout=500000)
public void testGetJobStatus() throws IOException, InterruptedException,
ClassNotFoundException {
MiniMRClientCluster mr = null;
@@ -105,4 +117,278 @@ public class TestNetworkedJob {
}
}
}
+  /**
+   * test NetworkedJob and the JobClient getters
+   * @throws Exception
+   */
+  @SuppressWarnings("deprecation")
+ @Test (timeout=500000)
+ public void testNetworkedJob() throws Exception {
+ // mock creation
+ MiniMRClientCluster mr = null;
+ FileSystem fileSys = null;
+
+ try {
+ Configuration conf = new Configuration();
+ mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+
+ JobConf job = new JobConf(mr.getConfig());
+
+ fileSys = FileSystem.get(job);
+ fileSys.delete(testDir, true);
+ FSDataOutputStream out = fileSys.create(inFile, true);
+ out.writeBytes("This is a test file");
+ out.close();
+
+ FileInputFormat.setInputPaths(job, inFile);
+ FileOutputFormat.setOutputPath(job, outDir);
+
+ job.setInputFormat(TextInputFormat.class);
+ job.setOutputFormat(TextOutputFormat.class);
+
+ job.setMapperClass(IdentityMapper.class);
+ job.setReducerClass(IdentityReducer.class);
+ job.setNumReduceTasks(0);
+
+ JobClient client = new JobClient(mr.getConfig());
+
+ RunningJob rj = client.submitJob(job);
+ JobID jobId = rj.getID();
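+    // getJob returns the JobClient.NetworkedJob wrapper for a submitted job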
+ NetworkedJob runningJob = (NetworkedJob) client.getJob(jobId);
+ runningJob.setJobPriority(JobPriority.HIGH.name());
+ // test getters
+ assertTrue(runningJob.getConfiguration().toString()
+ .endsWith("0001/job.xml"));
+ assertEquals(runningJob.getID(), jobId);
+ assertEquals(runningJob.getJobID(), jobId.toString());
+ assertEquals(runningJob.getJobName(), "N/A");
+ assertTrue(runningJob.getJobFile().endsWith(
+ ".staging/" + runningJob.getJobID() + "/job.xml"));
+ assertTrue(runningJob.getTrackingURL().length() > 0);
+ assertTrue(runningJob.mapProgress() == 0.0f);
+ assertTrue(runningJob.reduceProgress() == 0.0f);
+ assertTrue(runningJob.cleanupProgress() == 0.0f);
+ assertTrue(runningJob.setupProgress() == 0.0f);
+
+ TaskCompletionEvent[] tce = runningJob.getTaskCompletionEvents(0);
+ assertEquals(tce.length, 0);
+
+    assertEquals(runningJob.getHistoryUrl(), "");
+    assertFalse(runningJob.isRetired());
+    assertEquals(runningJob.getFailureInfo(), "");
+ assertEquals(runningJob.getJobStatus().getJobName(), "N/A");
+ assertEquals(client.getMapTaskReports(jobId).length, 0);
+
+ try {
+ client.getSetupTaskReports(jobId);
+ } catch (YarnException e) {
+ assertEquals(e.getMessage(), "Unrecognized task type: JOB_SETUP");
+ }
+ try {
+ client.getCleanupTaskReports(jobId);
+ } catch (YarnException e) {
+ assertEquals(e.getMessage(), "Unrecognized task type: JOB_CLEANUP");
+ }
+ assertEquals(client.getReduceTaskReports(jobId).length, 0);
+ // test ClusterStatus
+ ClusterStatus status = client.getClusterStatus(true);
+ assertEquals(status.getActiveTrackerNames().size(), 2);
+    // these methods are not implemented and always return an empty
+    // collection, zero, or null
+ assertEquals(status.getBlacklistedTrackers(), 0);
+ assertEquals(status.getBlacklistedTrackerNames().size(), 0);
+ assertEquals(status.getBlackListedTrackersInfo().size(), 0);
+ assertEquals(status.getJobTrackerStatus(), JobTrackerStatus.RUNNING);
+ assertEquals(status.getMapTasks(), 1);
+ assertEquals(status.getMaxMapTasks(), 20);
+ assertEquals(status.getMaxReduceTasks(), 4);
+ assertEquals(status.getNumExcludedNodes(), 0);
+ assertEquals(status.getReduceTasks(), 1);
+ assertEquals(status.getTaskTrackers(), 2);
+ assertEquals(status.getTTExpiryInterval(), 0);
+
+ // test read and write
+ ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+ status.write(new DataOutputStream(dataOut));
+ ClusterStatus status2 = new ClusterStatus();
+
+ status2.readFields(new DataInputStream(new ByteArrayInputStream(dataOut
+ .toByteArray())));
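+    // the deserialized copy must report the same values as the original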
+ assertEquals(status.getActiveTrackerNames(),
+ status2.getActiveTrackerNames());
+ assertEquals(status.getBlackListedTrackersInfo(),
+ status2.getBlackListedTrackersInfo());
+ assertEquals(status.getMapTasks(), status2.getMapTasks());
+
+    // test TaskStatusFilter
+ JobClient.setTaskOutputFilter(job, TaskStatusFilter.ALL);
+ assertEquals(JobClient.getTaskOutputFilter(job), TaskStatusFilter.ALL);
+
+ // test default map
+ assertEquals(client.getDefaultMaps(), 20);
+ assertEquals(client.getDefaultReduces(), 4);
+ assertEquals(client.getSystemDir().getName(), "jobSubmitDir");
+ // test queue information
+ JobQueueInfo[] rootQueueInfo = client.getRootQueues();
+ assertEquals(rootQueueInfo.length, 1);
+ assertEquals(rootQueueInfo[0].getQueueName(), "default");
+ JobQueueInfo[] qinfo = client.getQueues();
+ assertEquals(qinfo.length, 1);
+ assertEquals(qinfo[0].getQueueName(), "default");
+ assertEquals(client.getChildQueues("default").length, 0);
+ assertEquals(client.getJobsFromQueue("default").length, 1);
+ assertTrue(client.getJobsFromQueue("default")[0].getJobFile().endsWith(
+ "/job.xml"));
+
+ JobQueueInfo qi = client.getQueueInfo("default");
+ assertEquals(qi.getQueueName(), "default");
+ assertEquals(qi.getQueueState(), "running");
+
+ QueueAclsInfo[] aai = client.getQueueAclsForCurrentUser();
+ assertEquals(aai.length, 2);
+ assertEquals(aai[0].getQueueName(), "root");
+ assertEquals(aai[1].getQueueName(), "default");
+ // test token
+ Token<DelegationTokenIdentifier> token = client
+ .getDelegationToken(new Text(UserGroupInformation.getCurrentUser()
+ .getShortUserName()));
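+    // in MR2 the job client's delegation token is issued by the ResourceManager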
+ assertEquals(token.getKind().toString(), "RM_DELEGATION_TOKEN");
+
+    // test JobClient
+ // The following asserts read JobStatus twice and ensure the returned
+ // JobStatus objects correspond to the same Job.
+ assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId)
+ .getJobStatus().getJobID());
+ assertEquals("Expected matching startTimes", rj.getJobStatus()
+ .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime());
+ } finally {
+ if (fileSys != null) {
+ fileSys.delete(testDir, true);
+ }
+ if (mr != null) {
+ mr.stop();
+ }
+ }
+ }
+
+ /**
+ * test BlackListInfo class
+ *
+ * @throws IOException
+ */
+ @Test (timeout=5000)
+ public void testBlackListInfo() throws IOException {
+ BlackListInfo info = new BlackListInfo();
+ info.setBlackListReport("blackListInfo");
+ info.setReasonForBlackListing("reasonForBlackListing");
+ info.setTrackerName("trackerName");
+ ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
+ DataOutput out = new DataOutputStream(byteOut);
+ info.write(out);
+ BlackListInfo info2 = new BlackListInfo();
+ info2.readFields(new DataInputStream(new ByteArrayInputStream(byteOut
+ .toByteArray())));
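+    // the copy read back via readFields must equal the original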
+    assertEquals(info, info2);
+    assertEquals(info.toString(), info2.toString());
+ assertEquals(info.getTrackerName(), "trackerName");
+ assertEquals(info.getReasonForBlackListing(), "reasonForBlackListing");
+ assertEquals(info.getBlackListReport(), "blackListInfo");
+
+ }
+  /**
+   * test running JobQueueClient from the command line
+   * @throws Exception
+   */
+ @Test (timeout=500000)
+ public void testJobQueueClient() throws Exception {
+ MiniMRClientCluster mr = null;
+ FileSystem fileSys = null;
+ PrintStream oldOut = System.out;
+ try {
+ Configuration conf = new Configuration();
+ mr = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
+
+ JobConf job = new JobConf(mr.getConfig());
+
+ fileSys = FileSystem.get(job);
+ fileSys.delete(testDir, true);
+ FSDataOutputStream out = fileSys.create(inFile, true);
+ out.writeBytes("This is a test file");
+ out.close();
+
+ FileInputFormat.setInputPaths(job, inFile);
+ FileOutputFormat.setOutputPath(job, outDir);
+
+ job.setInputFormat(TextInputFormat.class);
+ job.setOutputFormat(TextOutputFormat.class);
+
+ job.setMapperClass(IdentityMapper.class);
+ job.setReducerClass(IdentityReducer.class);
+ job.setNumReduceTasks(0);
+
+ JobClient client = new JobClient(mr.getConfig());
+
+ client.submitJob(job);
+
+ JobQueueClient jobClient = new JobQueueClient(job);
+
+ ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(bytes));
+ String[] arg = { "-list" };
+ jobClient.run(arg);
+ assertTrue(bytes.toString().contains("Queue Name : default"));
+ assertTrue(bytes.toString().contains("Queue State : running"));
+ bytes = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(bytes));
+ String[] arg1 = { "-showacls" };
+ jobClient.run(arg1);
+ assertTrue(bytes.toString().contains("Queue acls for user :"));
+ assertTrue(bytes.toString().contains(
+ "root ADMINISTER_QUEUE,SUBMIT_APPLICATIONS"));
+ assertTrue(bytes.toString().contains(
+ "default ADMINISTER_QUEUE,SUBMIT_APPLICATIONS"));
+
+ // test for info and default queue
+
+ bytes = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(bytes));
+ String[] arg2 = { "-info", "default" };
+ jobClient.run(arg2);
+ assertTrue(bytes.toString().contains("Queue Name : default"));
+ assertTrue(bytes.toString().contains("Queue State : running"));
+ assertTrue(bytes.toString().contains("Scheduling Info"));
+
+      // test for info, default queue and jobs
+ bytes = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(bytes));
+ String[] arg3 = { "-info", "default", "-showJobs" };
+ jobClient.run(arg3);
+ assertTrue(bytes.toString().contains("Queue Name : default"));
+ assertTrue(bytes.toString().contains("Queue State : running"));
+ assertTrue(bytes.toString().contains("Scheduling Info"));
+ assertTrue(bytes.toString().contains("job_1"));
+
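+      // run with no arguments; this should only print usage, not throw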
+ String[] arg4 = {};
+ jobClient.run(arg4);
+
+ } finally {
+ System.setOut(oldOut);
+ if (fileSys != null) {
+ fileSys.delete(testDir, true);
+ }
+ if (mr != null) {
+ mr.stop();
+ }
+ }
+ }
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java Mon Apr 1 16:47:16 2013
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.mapred;
+import java.util.Map;
+
import junit.framework.TestCase;
import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
@@ -24,6 +26,7 @@ import org.apache.hadoop.mapred.Statisti
public class TestStatisticsCollector extends TestCase{
+ @SuppressWarnings("rawtypes")
public void testMovingWindow() throws Exception {
StatisticsCollector collector = new StatisticsCollector(1);
TimeWindow window = new TimeWindow("test", 6, 2);
@@ -78,6 +81,28 @@ public class TestStatisticsCollector ext
collector.update();
assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
assertEquals(95, stat.getValues().get(sincStart).getValue());
+
+ // test Stat class
+    Map updaters = collector.getUpdaters();
+    assertEquals(2, updaters.size());
+    Map<String, Stat> statistics = collector.getStatistics();
+    assertNotNull(statistics.get("m1"));
+
+    Stat newStat = collector.createStat("m2");
+    assertEquals("m2", newStat.name);
+    Stat st = collector.removeStat("m1");
+    assertEquals("m1", st.name);
+ assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
+ assertEquals(95, stat.getValues().get(sincStart).getValue());
+ st=collector.removeStat("m1");
+ // try to remove stat again
+ assertNull(st);
+ collector.start();
+    // wait 2.5 seconds so the collector thread runs at least one update
+ Thread.sleep(2500);
+ assertEquals(69, stat.getValues().get(window).getValue());
+ assertEquals(95, stat.getValues().get(sincStart).getValue());
+
}
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java Mon Apr 1 16:47:16 2013
@@ -61,11 +61,12 @@ public class TestTextInputFormat {
throw new RuntimeException("init failure", e);
}
}
+ @SuppressWarnings("deprecation")
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "/tmp")),
"TestTextInputFormat").makeQualified(localFs);
- @Test
+ @Test (timeout=500000)
public void testFormat() throws Exception {
JobConf job = new JobConf(defaultConf);
Path file = new Path(workDir, "test.txt");
@@ -145,7 +146,7 @@ public class TestTextInputFormat {
}
}
- @Test
+ @Test (timeout=900000)
public void testSplitableCodecs() throws IOException {
JobConf conf = new JobConf(defaultConf);
int seed = new Random().nextInt();
@@ -250,7 +251,7 @@ public class TestTextInputFormat {
bufsz);
}
- @Test
+ @Test (timeout=5000)
public void testUTF8() throws Exception {
LineReader in = makeStream("abcd\u20acbdcd\u20ac");
Text line = new Text();
@@ -269,7 +270,7 @@ public class TestTextInputFormat {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=5000)
public void testNewLines() throws Exception {
final String STR = "a\nbb\n\nccc\rdddd\r\r\r\n\r\neeeee";
final int STRLENBYTES = STR.getBytes().length;
@@ -309,7 +310,7 @@ public class TestTextInputFormat {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=5000)
public void testMaxLineLength() throws Exception {
final String STR = "a\nbb\n\nccc\rdddd\r\neeeee";
final int STRLENBYTES = STR.getBytes().length;
@@ -334,7 +335,7 @@ public class TestTextInputFormat {
}
}
- @Test
+ @Test (timeout=5000)
public void testMRMaxLine() throws Exception {
final int MAXPOS = 1024 * 1024;
final int MAXLINE = 10 * 1024;
@@ -354,6 +355,9 @@ public class TestTextInputFormat {
position += b.length;
return b.length;
}
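+      // rewind so the stream can be consumed again by another reader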
+ public void reset() {
+        position = 0;
+ }
};
final LongWritable key = new LongWritable();
final Text val = new Text();
@@ -362,8 +366,14 @@ public class TestTextInputFormat {
conf.setInt(org.apache.hadoop.mapreduce.lib.input.
LineRecordReader.MAX_LINE_LENGTH, MAXLINE);
conf.setInt("io.file.buffer.size", BUF); // used by LRR
- final LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+ // test another constructor
+ LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
+ assertFalse("Read a line from null", lrr.next(key, val));
+ infNull.reset();
+ lrr = new LineRecordReader(infNull, 0L, MAXLINE, MAXPOS);
assertFalse("Read a line from null", lrr.next(key, val));
}
private static void writeFile(FileSystem fs, Path name,
@@ -400,7 +410,7 @@ public class TestTextInputFormat {
/**
* Test using the gzip codec for reading
*/
- @Test
+ @Test (timeout=5000)
public void testGzip() throws IOException {
JobConf job = new JobConf(defaultConf);
CompressionCodec gzip = new GzipCodec();
@@ -434,7 +444,7 @@ public class TestTextInputFormat {
/**
* Test using the gzip codec and an empty input file
*/
- @Test
+ @Test (timeout=5000)
public void testGzipEmpty() throws IOException {
JobConf job = new JobConf(defaultConf);
CompressionCodec gzip = new GzipCodec();
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java Mon Apr 1 16:47:16 2013
@@ -44,7 +44,6 @@ public class TestTextOutputFormat extend
"data"),
FileOutputCommitter.TEMP_DIR_NAME), "_" + attempt);
- @SuppressWarnings("unchecked")
public void testFormat() throws Exception {
JobConf job = new JobConf();
job.set(JobContext.TASK_ATTEMPT_ID, attempt);
@@ -59,8 +58,8 @@ public class TestTextOutputFormat extend
// A reporter that does nothing
Reporter reporter = Reporter.NULL;
- TextOutputFormat theOutputFormat = new TextOutputFormat();
- RecordWriter theRecordWriter =
+ TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+ RecordWriter<Object,Object> theRecordWriter =
theOutputFormat.getRecordWriter(localFs, job, file, reporter);
Text key1 = new Text("key1");
@@ -95,7 +94,6 @@ public class TestTextOutputFormat extend
}
- @SuppressWarnings("unchecked")
public void testFormatWithCustomSeparator() throws Exception {
JobConf job = new JobConf();
String separator = "\u0001";
@@ -112,8 +110,8 @@ public class TestTextOutputFormat extend
// A reporter that does nothing
Reporter reporter = Reporter.NULL;
- TextOutputFormat theOutputFormat = new TextOutputFormat();
- RecordWriter theRecordWriter =
+ TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+ RecordWriter<Object,Object> theRecordWriter =
theOutputFormat.getRecordWriter(localFs, job, file, reporter);
Text key1 = new Text("key1");
@@ -147,7 +145,61 @@ public class TestTextOutputFormat extend
assertEquals(output, expectedOutput.toString());
}
-
+  /**
+   * test writing output with compression enabled
+   * @throws IOException
+   */
+  public void testCompress() throws IOException {
+ JobConf job = new JobConf();
+ String separator = "\u0001";
+ job.set("mapreduce.output.textoutputformat.separator", separator);
+ job.set(JobContext.TASK_ATTEMPT_ID, attempt);
+ job.set(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS,"true");
+
+ FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
+ FileOutputFormat.setWorkOutputPath(job, workDir);
+ FileSystem fs = workDir.getFileSystem(job);
+ if (!fs.mkdirs(workDir)) {
+ fail("Failed to create output directory");
+ }
+ String file = "test.txt";
+
+ // A reporter that does nothing
+ Reporter reporter = Reporter.NULL;
+
+ TextOutputFormat<Object,Object> theOutputFormat = new TextOutputFormat<Object,Object>();
+ RecordWriter<Object,Object> theRecordWriter =
+ theOutputFormat.getRecordWriter(localFs, job, file, reporter);
+ Text key1 = new Text("key1");
+ Text key2 = new Text("key2");
+ Text val1 = new Text("val1");
+ Text val2 = new Text("val2");
+ NullWritable nullWritable = NullWritable.get();
+
+ try {
+ theRecordWriter.write(key1, val1);
+ theRecordWriter.write(null, nullWritable);
+ theRecordWriter.write(null, val1);
+ theRecordWriter.write(nullWritable, val2);
+ theRecordWriter.write(key2, nullWritable);
+ theRecordWriter.write(key1, null);
+ theRecordWriter.write(null, null);
+ theRecordWriter.write(key2, val2);
+
+ } finally {
+ theRecordWriter.close(reporter);
+ }
+ File expectedFile = new File(new Path(workDir, file).toString());
+ StringBuffer expectedOutput = new StringBuffer();
+ expectedOutput.append(key1).append(separator).append(val1).append("\n");
+ expectedOutput.append(val1).append("\n");
+ expectedOutput.append(val2).append("\n");
+ expectedOutput.append(key2).append("\n");
+ expectedOutput.append(key1).append("\n");
+ expectedOutput.append(key2).append(separator).append(val2).append("\n");
+ String output = UtilsForTests.slurp(expectedFile);
+ assertEquals(output, expectedOutput.toString());
+ }
public static void main(String[] args) throws Exception {
new TestTextOutputFormat().testFormat();
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java Mon Apr 1 16:47:16 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.Counters.Group;
@@ -106,7 +107,7 @@ public class TestPipeApplication {
Token<ApplicationTokenIdentifier> token = new Token<ApplicationTokenIdentifier>(
"user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
"service"));
- conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+ TokenCache.setJobToken(token, conf.getCredentials());
conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
TestTaskReporter reporter = new TestTaskReporter();
PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();
@@ -171,7 +172,7 @@ public class TestPipeApplication {
"user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
"service"));
- conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+ TokenCache.setJobToken(token, conf.getCredentials());
FakeCollector output = new FakeCollector(new Counters.Counter(),
new Progress());
FileSystem fs = new RawLocalFileSystem();
@@ -391,7 +392,7 @@ public class TestPipeApplication {
Token<ApplicationTokenIdentifier> token = new Token<ApplicationTokenIdentifier>(
"user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
"service"));
- conf.getCredentials().addToken(new Text("ShuffleAndJobToken"), token);
+ TokenCache.setJobToken(token, conf.getCredentials());
File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java Mon Apr 1 16:47:16 2013
@@ -20,11 +20,14 @@ package org.apache.hadoop.mapreduce.lib.
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
+import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
+import java.util.Set;
import java.util.zip.GZIPOutputStream;
import java.util.concurrent.TimeoutException;
+import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.fs.*;
@@ -42,9 +45,13 @@ import org.apache.hadoop.mapreduce.Recor
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat.OneBlockInfo;
+import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat.OneFileInfo;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.Test;
+import com.google.common.collect.HashMultiset;
+
public class TestCombineFileInputFormat extends TestCase {
private static final String rack1[] = new String[] {
@@ -476,23 +483,23 @@ public class TestCombineFileInputFormat
assertEquals(BLOCKSIZE, fileSplit.getLength(1));
assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(file3.getName(), fileSplit.getPath(0).getName());
- assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(0));
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
assertEquals(BLOCKSIZE, fileSplit.getLength(0));
- assertEquals(file4.getName(), fileSplit.getPath(1).getName());
- assertEquals(0, fileSplit.getOffset(1));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
assertEquals(BLOCKSIZE, fileSplit.getLength(1));
- assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ assertEquals("host2.rack2.com", fileSplit.getLocations()[0]);
fileSplit = (CombineFileSplit) splits.get(2);
assertEquals(2, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
- assertEquals(file4.getName(), fileSplit.getPath(0).getName());
- assertEquals(BLOCKSIZE, fileSplit.getOffset(0));
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
assertEquals(BLOCKSIZE, fileSplit.getLength(0));
- assertEquals(file4.getName(), fileSplit.getPath(1).getName());
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(1));
assertEquals(BLOCKSIZE, fileSplit.getLength(1));
- assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ assertEquals("host1.rack1.com", fileSplit.getLocations()[0]);
// maximum split size is 3 blocks
inFormat = new DummyInputFormat();
@@ -504,7 +511,7 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test5): " + split);
}
- assertEquals(4, splits.size());
+ assertEquals(3, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
assertEquals(3, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
@@ -519,32 +526,28 @@ public class TestCombineFileInputFormat
assertEquals(BLOCKSIZE, fileSplit.getLength(2));
assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(file4.getName(), fileSplit.getPath(0).getName());
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
assertEquals(0, fileSplit.getOffset(0));
assertEquals(BLOCKSIZE, fileSplit.getLength(0));
- assertEquals(file4.getName(), fileSplit.getPath(1).getName());
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
assertEquals(BLOCKSIZE, fileSplit.getLength(1));
assertEquals(file4.getName(), fileSplit.getPath(2).getName());
- assertEquals( 2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(0, fileSplit.getOffset(2));
assertEquals(BLOCKSIZE, fileSplit.getLength(2));
- assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ assertEquals("host2.rack2.com", fileSplit.getLocations()[0]);
fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(3, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
- assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
assertEquals(0, fileSplit.getOffset(0));
assertEquals(BLOCKSIZE, fileSplit.getLength(0));
- assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(file4.getName(), fileSplit.getPath(1).getName());
assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
assertEquals(BLOCKSIZE, fileSplit.getLength(1));
- assertEquals("host2.rack2.com", fileSplit.getLocations()[0]);
- fileSplit = (CombineFileSplit) splits.get(3);
- assertEquals(1, fileSplit.getNumPaths());
- assertEquals(1, fileSplit.getLocations().length);
- assertEquals(file1.getName(), fileSplit.getPath(0).getName());
- assertEquals(0, fileSplit.getOffset(0));
- assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file4.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2*BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
assertEquals("host1.rack1.com", fileSplit.getLocations()[0]);
// maximum split size is 4 blocks
@@ -713,6 +716,56 @@ public class TestCombineFileInputFormat
DFSTestUtil.waitReplication(fileSys, name, replication);
}
+ public void testNodeInputSplit() throws IOException, InterruptedException {
+ // Regression test for MAPREDUCE-4892. There are 2 nodes with all blocks on
+ // both nodes. The grouping ensures that both nodes get splits instead of
+ // just the first node
+ DummyInputFormat inFormat = new DummyInputFormat();
+ int numBlocks = 12;
+ long totLength = 0;
+ long blockSize = 100;
+ long maxSize = 200;
+ long minSizeNode = 50;
+ long minSizeRack = 50;
+ String[] locations = { "h1", "h2" };
+ String[] racks = new String[0];
+ Path path = new Path("hdfs://file");
+
+ OneBlockInfo[] blocks = new OneBlockInfo[numBlocks];
+ for(int i=0; i<numBlocks; ++i) {
+ blocks[i] = new OneBlockInfo(path, i*blockSize, blockSize, locations, racks);
+ totLength += blockSize;
+ }
+
+ List<InputSplit> splits = new ArrayList<InputSplit>();
+ HashMap<String, Set<String>> rackToNodes =
+ new HashMap<String, Set<String>>();
+ HashMap<String, List<OneBlockInfo>> rackToBlocks =
+ new HashMap<String, List<OneBlockInfo>>();
+ HashMap<OneBlockInfo, String[]> blockToNodes =
+ new HashMap<OneBlockInfo, String[]>();
+ HashMap<String, List<OneBlockInfo>> nodeToBlocks =
+ new HashMap<String, List<OneBlockInfo>>();
+
+ OneFileInfo.populateBlockInfo(blocks, rackToBlocks, blockToNodes,
+ nodeToBlocks, rackToNodes);
+
+ inFormat.createSplits(nodeToBlocks, blockToNodes, rackToBlocks, totLength,
+ maxSize, minSizeNode, minSizeRack, splits);
+
+ int expectedSplitCount = (int)(totLength/maxSize);
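+    // 12 blocks * 100 bytes = 1200 bytes total; with a 200-byte max split
+    // size that yields 6 splits, which should spread evenly over both nodes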
+ Assert.assertEquals(expectedSplitCount, splits.size());
+ HashMultiset<String> nodeSplits = HashMultiset.create();
+ for(int i=0; i<expectedSplitCount; ++i) {
+ InputSplit inSplit = splits.get(i);
+ Assert.assertEquals(maxSize, inSplit.getLength());
+ Assert.assertEquals(1, inSplit.getLocations().length);
+ nodeSplits.add(inSplit.getLocations()[0]);
+ }
+ Assert.assertEquals(3, nodeSplits.count(locations[0]));
+ Assert.assertEquals(3, nodeSplits.count(locations[1]));
+ }
+
public void testSplitPlacementForCompressedFiles() throws Exception {
MiniDFSCluster dfs = null;
FileSystem fileSys = null;
@@ -889,24 +942,24 @@ public class TestCombineFileInputFormat
assertEquals(f3.getLen(), fileSplit.getLength(0));
assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r3
fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(file4.getName(), fileSplit.getPath(0).getName());
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
assertEquals(0, fileSplit.getOffset(0));
- assertEquals(f4.getLen(), fileSplit.getLength(0));
- assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r3
+ assertEquals(f2.getLen(), fileSplit.getLength(0));
+ assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r3
fileSplit = (CombineFileSplit) splits.get(2);
assertEquals(1, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
- assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
assertEquals(0, fileSplit.getOffset(0));
- assertEquals(f2.getLen(), fileSplit.getLength(0));
- assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r2
+ assertEquals(f1.getLen(), fileSplit.getLength(0));
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r2
fileSplit = (CombineFileSplit) splits.get(3);
assertEquals(1, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
- assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(file4.getName(), fileSplit.getPath(0).getName());
assertEquals(0, fileSplit.getOffset(0));
- assertEquals(f1.getLen(), fileSplit.getLength(0));
- assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
+ assertEquals(f4.getLen(), fileSplit.getLength(0));
+ assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r1
// maximum split size is twice file1's length
inFormat = new DummyInputFormat();
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java Mon Apr 1 16:47:16 2013
@@ -25,6 +25,8 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.Map;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import org.apache.commons.io.FileUtils;
@@ -103,6 +105,8 @@ public class TestMRJobs {
private static Path TEST_ROOT_DIR = new Path("target",
TestMRJobs.class.getName() + "-tmpDir").makeQualified(localFs);
static Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar");
+ private static final String OUTPUT_ROOT_DIR = "/tmp/" +
+ TestMRJobs.class.getSimpleName();
@BeforeClass
public static void setup() throws IOException {
@@ -140,7 +144,7 @@ public class TestMRJobs {
}
}
- @Test
+ @Test (timeout = 300000)
public void testSleepJob() throws IOException, InterruptedException,
ClassNotFoundException {
LOG.info("\n\n\nStarting testSleepJob().");
@@ -211,7 +215,7 @@ public class TestMRJobs {
}
}
- @Test
+ @Test (timeout = 30000)
public void testRandomWriter() throws IOException, InterruptedException,
ClassNotFoundException {
@@ -226,8 +230,7 @@ public class TestMRJobs {
mrCluster.getConfig().set(RandomTextWriterJob.TOTAL_BYTES, "3072");
mrCluster.getConfig().set(RandomTextWriterJob.BYTES_PER_MAP, "1024");
Job job = randomWriterJob.createJob(mrCluster.getConfig());
- Path outputDir =
- new Path(mrCluster.getTestWorkDir().getAbsolutePath(), "random-output");
+ Path outputDir = new Path(OUTPUT_ROOT_DIR, "random-output");
FileOutputFormat.setOutputPath(job, outputDir);
job.setSpeculativeExecution(false);
job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
@@ -274,7 +277,7 @@ public class TestMRJobs {
&& counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue() != 0);
}
- @Test
+ @Test (timeout = 30000)
public void testFailingMapper() throws IOException, InterruptedException,
ClassNotFoundException {
@@ -342,9 +345,8 @@ public class TestMRJobs {
job.setMapperClass(FailingMapper.class);
job.setNumReduceTasks(0);
- FileOutputFormat.setOutputPath(job,
- new Path(mrCluster.getTestWorkDir().getAbsolutePath(),
- "failmapper-output"));
+ FileOutputFormat.setOutputPath(job, new Path(OUTPUT_ROOT_DIR,
+ "failmapper-output"));
job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
job.submit();
String trackingUrl = job.getTrackingURL();
@@ -357,7 +359,7 @@ public class TestMRJobs {
return job;
}
- //@Test
+ //@Test (timeout = 30000)
public void testSleepJobWithSecurityOn() throws IOException,
InterruptedException, ClassNotFoundException {
@@ -425,14 +427,22 @@ public class TestMRJobs {
Assert.assertEquals(2, archives.length);
// Check lengths of the files
- Assert.assertEquals(1, localFs.getFileStatus(files[1]).getLen());
- Assert.assertTrue(localFs.getFileStatus(files[2]).getLen() > 1);
+ Map<String, Path> filesMap = pathsToMap(files);
+ Assert.assertTrue(filesMap.containsKey("distributed.first.symlink"));
+ Assert.assertEquals(1, localFs.getFileStatus(
+ filesMap.get("distributed.first.symlink")).getLen());
+ Assert.assertTrue(filesMap.containsKey("distributed.second.jar"));
+ Assert.assertTrue(localFs.getFileStatus(
+ filesMap.get("distributed.second.jar")).getLen() > 1);
// Check extraction of the archive
- Assert.assertTrue(localFs.exists(new Path(archives[0],
- "distributed.jar.inside3")));
- Assert.assertTrue(localFs.exists(new Path(archives[1],
- "distributed.jar.inside4")));
+ Map<String, Path> archivesMap = pathsToMap(archives);
+ Assert.assertTrue(archivesMap.containsKey("distributed.third.jar"));
+ Assert.assertTrue(localFs.exists(new Path(
+ archivesMap.get("distributed.third.jar"), "distributed.jar.inside3")));
+ Assert.assertTrue(archivesMap.containsKey("distributed.fourth.jar"));
+ Assert.assertTrue(localFs.exists(new Path(
+ archivesMap.get("distributed.fourth.jar"), "distributed.jar.inside4")));
// Check the class loaders
LOG.info("Java Classpath: " + System.getProperty("java.class.path"));
@@ -460,6 +470,23 @@ public class TestMRJobs {
Assert.assertTrue(FileUtils.isSymlink(jobJarDir));
Assert.assertTrue(jobJarDir.isDirectory());
}
+
+ /**
+ * Returns a mapping of the final component of each path to the corresponding
+ * Path instance. This assumes that every given Path has a unique string in
+ * the final path component, which is true for these tests.
+ *
+ * @param paths Path[] to map
+ * @return Map<String, Path> mapping the final component of each path to the
+ * corresponding Path instance
+ */
+ private static Map<String, Path> pathsToMap(Path[] paths) {
+ Map<String, Path> map = new HashMap<String, Path>();
+ for (Path path: paths) {
+ map.put(path.getName(), path);
+ }
+ return map;
+ }
}
public void _testDistributedCache(String jobJarPath) throws Exception {
@@ -515,7 +542,7 @@ public class TestMRJobs {
trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
}
- @Test
+ @Test (timeout = 300000)
public void testDistributedCache() throws Exception {
// Test with a local (file:///) Job Jar
Path localJobJarPath = makeJobJarWithLib(TEST_ROOT_DIR.toUri().toString());
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java Mon Apr 1 16:47:16 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File;
import java.io.IOException;
+import java.util.EnumSet;
import java.util.List;
import junit.framework.Assert;
@@ -58,6 +59,9 @@ public class TestMRJobsWithHistoryServic
private static final Log LOG =
LogFactory.getLog(TestMRJobsWithHistoryService.class);
+ private static final EnumSet<RMAppState> TERMINAL_RM_APP_STATES =
+ EnumSet.of(RMAppState.FINISHED, RMAppState.FAILED, RMAppState.KILLED);
+
private static MiniMRYarnCluster mrCluster;
private static Configuration conf = new Configuration();
@@ -108,7 +112,7 @@ public class TestMRJobsWithHistoryServic
}
}
- @Test
+ @Test (timeout = 30000)
public void testJobHistoryData() throws IOException, InterruptedException,
AvroRemoteException, ClassNotFoundException {
if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
@@ -129,12 +133,24 @@ public class TestMRJobsWithHistoryServic
Counters counterMR = job.getCounters();
JobId jobId = TypeConverter.toYarn(job.getJobID());
ApplicationId appID = jobId.getAppId();
+ int pollElapsed = 0;
while (true) {
Thread.sleep(1000);
- if (mrCluster.getResourceManager().getRMContext().getRMApps()
- .get(appID).getState().equals(RMAppState.FINISHED))
+ pollElapsed += 1000;
+
+ if (TERMINAL_RM_APP_STATES.contains(
+ mrCluster.getResourceManager().getRMContext().getRMApps().get(appID)
+ .getState())) {
+ break;
+ }
+
+ if (pollElapsed >= 60000) {
+ LOG.warn("application did not reach terminal state within 60 seconds");
break;
+ }
}
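+    // verify the app specifically FINISHED rather than FAILED or KILLED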
+ Assert.assertEquals(RMAppState.FINISHED, mrCluster.getResourceManager()
+ .getRMContext().getRMApps().get(appID).getState());
Counters counterHS = job.getCounters();
//TODO the Assert below worked. need to check
//Should we compare each field or convert to V2 counter and compare
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testshell/ExternalMapReduce.java Mon Apr 1 16:47:16 2013
@@ -72,7 +72,7 @@ public class ExternalMapReduce extends C
}
//fork off ls to see if the file exists.
// java file.exists() will not work on
- // cygwin since it is a symlink
+ // Windows since it is a symlink
String[] argv = new String[7];
argv[0] = "ls";
argv[1] = "files_tmp";
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java Mon Apr 1 16:47:16 2013
@@ -69,8 +69,10 @@ public class FadvisedChunkedFile extends
}
if (manageOsCache && getEndOffset() - getStartOffset() > 0) {
try {
- NativeIO.posixFadviseIfPossible(fd, getStartOffset(), getEndOffset()
- - getStartOffset(), NativeIO.POSIX_FADV_DONTNEED);
+ NativeIO.POSIX.posixFadviseIfPossible(
+ fd,
+ getStartOffset(), getEndOffset() - getStartOffset(),
+ NativeIO.POSIX.POSIX_FADV_DONTNEED);
} catch (Throwable t) {
LOG.warn("Failed to manage OS cache for " + identifier, t);
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java Mon Apr 1 16:47:16 2013
@@ -71,8 +71,9 @@ public class FadvisedFileRegion extends
}
if (manageOsCache && getCount() > 0) {
try {
- NativeIO.posixFadviseIfPossible(fd, getPosition(), getCount(),
- NativeIO.POSIX_FADV_DONTNEED);
+ NativeIO.POSIX.posixFadviseIfPossible(
+ fd, getPosition(), getCount(),
+ NativeIO.POSIX.POSIX_FADV_DONTNEED);
} catch (Throwable t) {
LOG.warn("Failed to manage OS cache for " + identifier, t);
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java Mon Apr 1 16:47:16 2013
@@ -88,6 +88,7 @@ import org.jboss.netty.channel.ChannelFu
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
@@ -121,7 +122,7 @@ public class ShuffleHandler extends Abst
public static final String SHUFFLE_READAHEAD_BYTES = "mapreduce.shuffle.readahead.bytes";
public static final int DEFAULT_SHUFFLE_READAHEAD_BYTES = 4 * 1024 * 1024;
-
+
// pattern to identify errors related to the client closing the socket early
// idea borrowed from Netty SslHandler
private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
@@ -133,15 +134,15 @@ public class ShuffleHandler extends Abst
private final ChannelGroup accepted = new DefaultChannelGroup();
protected HttpPipelineFactory pipelineFact;
private int sslFileBufferSize;
-
+
/**
* Should the shuffle use posix_fadvise calls to manage the OS cache during
* sendfile
*/
private boolean manageOsCache;
private int readaheadLength;
+ private int maxShuffleConnections;
private ReadaheadPool readaheadPool = ReadaheadPool.getInstance();
-
public static final String MAPREDUCE_SHUFFLE_SERVICEID =
"mapreduce.shuffle";
@@ -159,6 +160,9 @@ public class ShuffleHandler extends Abst
public static final int DEFAULT_SUFFLE_SSL_FILE_BUFFER_SIZE = 60 * 1024;
+ public static final String MAX_SHUFFLE_CONNECTIONS = "mapreduce.shuffle.max.connections";
+ public static final int DEFAULT_MAX_SHUFFLE_CONNECTIONS = 0; // 0 implies no limit
+
@Metrics(about="Shuffle output metrics", context="mapred")
static class ShuffleMetrics implements ChannelFutureListener {
@Metric("Shuffle output in bytes")
@@ -270,6 +274,9 @@ public class ShuffleHandler extends Abst
readaheadLength = conf.getInt(SHUFFLE_READAHEAD_BYTES,
DEFAULT_SHUFFLE_READAHEAD_BYTES);
+ maxShuffleConnections = conf.getInt(MAX_SHUFFLE_CONNECTIONS,
+ DEFAULT_MAX_SHUFFLE_CONNECTIONS);
+
ThreadFactory bossFactory = new ThreadFactoryBuilder()
.setNameFormat("ShuffleHandler Netty Boss #%d")
.build();
@@ -400,6 +407,21 @@ public class ShuffleHandler extends Abst
}
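+    /**
+     * Close an incoming connection once the number of open shuffle
+     * connections reaches mapreduce.shuffle.max.connections; a limit
+     * of 0 disables the check.
+     */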
@Override
+ public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent evt)
+ throws Exception {
+ if ((maxShuffleConnections > 0) && (accepted.size() >= maxShuffleConnections)) {
+ LOG.info(String.format("Current number of shuffle connections (%d) is " +
+ "greater than or equal to the max allowed shuffle connections (%d)",
+ accepted.size(), maxShuffleConnections));
+ evt.getChannel().close();
+ return;
+ }
+ accepted.add(evt.getChannel());
+ super.channelOpen(ctx, evt);
+    }
+
+ @Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent evt)
throws Exception {
HttpRequest request = (HttpRequest) evt.getMessage();
@@ -527,15 +549,19 @@ public class ShuffleHandler extends Abst
ContainerLocalizer.USERCACHE + "/" + user + "/"
+ ContainerLocalizer.APPCACHE + "/"
+ ConverterUtils.toString(appID) + "/output" + "/" + mapId;
- LOG.debug("DEBUG0 " + base);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("DEBUG0 " + base);
+ }
// Index file
Path indexFileName = lDirAlloc.getLocalPathToRead(
base + "/file.out.index", conf);
// Map-output file
Path mapOutputFileName = lDirAlloc.getLocalPathToRead(
base + "/file.out", conf);
- LOG.debug("DEBUG1 " + base + " : " + mapOutputFileName + " : " +
- indexFileName);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("DEBUG1 " + base + " : " + mapOutputFileName + " : "
+ + indexFileName);
+ }
final IndexRecord info =
indexCache.getIndexInformation(mapId, reduce, indexFileName, user);
final ShuffleHeader header =
@@ -620,6 +646,5 @@ public class ShuffleHandler extends Abst
sendError(ctx, INTERNAL_SERVER_ERROR);
}
}
-
}
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java Mon Apr 1 16:47:16 2013
@@ -24,13 +24,15 @@ import static org.apache.hadoop.test.Moc
import static org.apache.hadoop.test.MockitoMaker.stub;
import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;
import static org.junit.Assert.assertEquals;
-
import java.io.DataInputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
+import java.net.SocketException;
import java.net.URL;
import java.util.ArrayList;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;
@@ -47,10 +49,13 @@ import org.jboss.netty.handler.codec.htt
import org.junit.Assert;
import org.junit.Test;
+
public class TestShuffleHandler {
- static final long MiB = 1024 * 1024;
+ static final long MiB = 1024 * 1024;
+ private static final Log LOG = LogFactory.getLog(TestShuffleHandler.class);
- @Test public void testSerializeMeta() throws Exception {
+ @Test (timeout = 10000)
+ public void testSerializeMeta() throws Exception {
assertEquals(1, ShuffleHandler.deserializeMetaData(
ShuffleHandler.serializeMetaData(1)));
assertEquals(-1, ShuffleHandler.deserializeMetaData(
@@ -59,7 +64,8 @@ public class TestShuffleHandler {
ShuffleHandler.serializeMetaData(8080)));
}
- @Test public void testShuffleMetrics() throws Exception {
+ @Test (timeout = 10000)
+ public void testShuffleMetrics() throws Exception {
MetricsSystem ms = new MetricsSystemImpl();
ShuffleHandler sh = new ShuffleHandler(ms);
ChannelFuture cf = make(stub(ChannelFuture.class).
@@ -88,7 +94,7 @@ public class TestShuffleHandler {
assertGauge("ShuffleConnections", connections, rb);
}
- @Test
+ @Test (timeout = 10000)
public void testClientClosesConnection() throws Exception {
final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
Configuration conf = new Configuration();
@@ -159,4 +165,84 @@ public class TestShuffleHandler {
Assert.assertTrue("sendError called when client closed connection",
failures.size() == 0);
}
+
+ @Test (timeout = 10000)
+ public void testMaxConnections() throws Exception {
+
+ Configuration conf = new Configuration();
+ conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
+ conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
+ ShuffleHandler shuffleHandler = new ShuffleHandler() {
+ @Override
+ protected Shuffle getShuffle(Configuration conf) {
+ // replace the shuffle handler with one stubbed for testing
+ return new Shuffle(conf) {
+ @Override
+ protected void verifyRequest(String appid, ChannelHandlerContext ctx,
+ HttpRequest request, HttpResponse response, URL requestUri)
+ throws IOException {
+ }
+ @Override
+ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx,
+ Channel ch, String user, String jobId, String mapId, int reduce)
+ throws IOException {
+ // send a shuffle header and a lot of data down the channel
+ // to trigger a broken pipe
+ ShuffleHeader header =
+ new ShuffleHeader("dummy_header", 5678, 5678, 1);
+ DataOutputBuffer dob = new DataOutputBuffer();
+ header.write(dob);
+ ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+ dob = new DataOutputBuffer();
+ for (int i=0; i<100000; ++i) {
+ header.write(dob);
+ }
+ return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
+ }
+ };
+ }
+ };
+ shuffleHandler.init(conf);
+ shuffleHandler.start();
+
+ // setup connections
+ int connAttempts = 3;
+ HttpURLConnection conns[] = new HttpURLConnection[connAttempts];
+
+ for (int i = 0; i < connAttempts; i++) {
+ String URLstring = "http://127.0.0.1:"
+ + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
+ + "/mapOutput?job=job_12345_1&reduce=1&map=attempt_12345_1_m_"
+ + i + "_0";
+ URL url = new URL(URLstring);
+ conns[i] = (HttpURLConnection)url.openConnection();
+ }
+
+ // Try to open numerous connections
+ for (int i = 0; i < connAttempts; i++) {
+ conns[i].connect();
+ }
+
+    // Ensure the first connections are okay
+ conns[0].getInputStream();
+ int rc = conns[0].getResponseCode();
+ Assert.assertEquals(HttpURLConnection.HTTP_OK, rc);
+
+ conns[1].getInputStream();
+ rc = conns[1].getResponseCode();
+ Assert.assertEquals(HttpURLConnection.HTTP_OK, rc);
+
+    // This connection should be closed because it is above the limit
+ try {
+ conns[2].getInputStream();
+ rc = conns[2].getResponseCode();
+ Assert.fail("Expected a SocketException");
+ } catch (SocketException se) {
+ LOG.info("Expected - connection should not be open");
+ } catch (Exception e) {
+ Assert.fail("Expected a SocketException");
+ }
+
+ shuffleHandler.stop();
+ }
}
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Mon Apr 1 16:47:16 2013
@@ -130,7 +130,7 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.jboss.netty</groupId>
+ <groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</dependency>
<dependency>
@@ -176,6 +176,10 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
+ <environmentVariables>
+ <!-- HADOOP_HOME required for tests on Windows to find winutils -->
+ <HADOOP_HOME>${basedir}/../../../hadoop-common-project/hadoop-common/target</HADOOP_HOME>
+ </environmentVariables>
<properties>
<property>
<name>listener</name>
Modified: hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-mapreduce-project/pom.xml Mon Apr 1 16:47:16 2013
@@ -61,7 +61,7 @@
<artifactId>ant</artifactId>
</exclusion>
<exclusion>
- <groupId>org.jboss.netty</groupId>
+ <groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</exclusion>
<exclusion>
@@ -151,7 +151,7 @@
<artifactId>junit</artifactId>
</dependency>
<dependency>
- <groupId>org.jboss.netty</groupId>
+ <groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</dependency>
<dependency>
@@ -182,15 +182,8 @@
<target if="tar">
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/dist-maketar.sh">
-
- which cygpath 2> /dev/null
- if [ $? = 1 ]; then
- BUILD_DIR="${project.build.directory}"
- else
- BUILD_DIR=`cygpath --unix '${project.build.directory}'`
- fi
- cd $BUILD_DIR
- tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+ cd "${project.build.directory}"
+ tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./dist-maketar.sh"/>