You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by ss...@apache.org on 2012/10/19 20:49:47 UTC
svn commit: r1400219 [2/2] - in
/hadoop/common/branches/MR-3902/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/s...
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Fri Oct 19 18:49:38 2012
@@ -50,8 +50,10 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
@@ -60,6 +62,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
@@ -240,6 +243,24 @@ public class MRApp extends MRAppMaster {
return job;
}
+ public void waitForInternalState(TaskAttemptImpl attempt,
+ TaskAttemptStateInternal finalState) throws Exception {
+ int timeoutSecs = 0;
+ TaskAttemptReport report = attempt.getReport();
+ TaskAttemptStateInternal iState = attempt.getInternalState();
+ while (!finalState.equals(iState) && timeoutSecs++ < 20) {
+ System.out.println("TaskAttempt Internal State is : " + iState
+ + " Waiting for Internal state : " + finalState + " progress : "
+ + report.getProgress());
+ Thread.sleep(500);
+ report = attempt.getReport();
+ iState = attempt.getInternalState();
+ }
+ System.out.println("TaskAttempt Internal State is : " + iState);
+ Assert.assertEquals("TaskAttempt Internal state is not correct (timedout)",
+ finalState, iState);
+ }
+
public void waitForState(TaskAttempt attempt,
TaskAttemptState finalState) throws Exception {
int timeoutSecs = 0;
@@ -501,18 +522,18 @@ public class MRApp extends MRAppMaster {
//override the init transition
private final TestInitTransition initTransition = new TestInitTransition(
maps, reduces);
- StateMachineFactory<JobImpl, JobState, JobEventType, JobEvent> localFactory
- = stateMachineFactory.addTransition(JobState.NEW,
- EnumSet.of(JobState.INITED, JobState.FAILED),
+ StateMachineFactory<JobImpl, JobStateInternal, JobEventType, JobEvent> localFactory
+ = stateMachineFactory.addTransition(JobStateInternal.NEW,
+ EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
JobEventType.JOB_INIT,
// This is abusive.
initTransition);
- private final StateMachine<JobState, JobEventType, JobEvent>
+ private final StateMachine<JobStateInternal, JobEventType, JobEvent>
localStateMachine;
@Override
- protected StateMachine<JobState, JobEventType, JobEvent> getStateMachine() {
+ protected StateMachine<JobStateInternal, JobEventType, JobEvent> getStateMachine() {
return localStateMachine;
}
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java Fri Oct 19 18:49:38 2012
@@ -36,8 +36,10 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
@@ -190,7 +192,8 @@ public class TestFail {
Assert.assertEquals("Num attempts is not correct", maxAttempts, attempts
.size());
TaskAttempt attempt = attempts.values().iterator().next();
- app.waitForState(attempt, TaskAttemptState.ASSIGNED);
+ app.waitForInternalState((TaskAttemptImpl) attempt,
+ TaskAttemptStateInternal.ASSIGNED);
app.getDispatcher().getEventHandler().handle(
new TaskAttemptEvent(attempt.getID(),
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Fri Oct 19 18:49:38 2012
@@ -48,7 +48,6 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -56,11 +55,13 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobUpdatedNodesEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptKillEvent;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerFailedEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
@@ -411,8 +412,8 @@ public class TestRMContainerAllocator {
// Wait till all map-attempts request for containers
for (Task t : job.getTasks().values()) {
if (t.getType() == TaskType.MAP) {
- mrApp.waitForState(t.getAttempts().values().iterator().next(),
- TaskAttemptState.UNASSIGNED);
+ mrApp.waitForInternalState((TaskAttemptImpl) t.getAttempts().values()
+ .iterator().next(), TaskAttemptStateInternal.UNASSIGNED);
}
}
amDispatcher.await();
@@ -562,8 +563,8 @@ public class TestRMContainerAllocator {
amDispatcher.await();
// Wait till all map-attempts request for containers
for (Task t : job.getTasks().values()) {
- mrApp.waitForState(t.getAttempts().values().iterator().next(),
- TaskAttemptState.UNASSIGNED);
+ mrApp.waitForInternalState((TaskAttemptImpl) t.getAttempts().values()
+ .iterator().next(), TaskAttemptStateInternal.UNASSIGNED);
}
amDispatcher.await();
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Fri Oct 19 18:49:38 2012
@@ -42,8 +42,8 @@ import org.apache.hadoop.mapreduce.jobhi
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
@@ -77,11 +77,11 @@ public class TestJobImpl {
tasks.put(mockTask.getID(), mockTask);
mockJob.tasks = tasks;
- when(mockJob.getState()).thenReturn(JobState.ERROR);
+ when(mockJob.getInternalState()).thenReturn(JobStateInternal.ERROR);
JobEvent mockJobEvent = mock(JobEvent.class);
- JobState state = trans.transition(mockJob, mockJobEvent);
+ JobStateInternal state = trans.transition(mockJob, mockJobEvent);
Assert.assertEquals("Incorrect state returned from JobNoTasksCompletedTransition",
- JobState.ERROR, state);
+ JobStateInternal.ERROR, state);
}
@Test
@@ -96,9 +96,12 @@ public class TestJobImpl {
when(mockJob.getCommitter()).thenReturn(mockCommitter);
when(mockJob.getEventHandler()).thenReturn(mockEventHandler);
when(mockJob.getJobContext()).thenReturn(mockJobContext);
- when(mockJob.finished(JobState.KILLED)).thenReturn(JobState.KILLED);
- when(mockJob.finished(JobState.FAILED)).thenReturn(JobState.FAILED);
- when(mockJob.finished(JobState.SUCCEEDED)).thenReturn(JobState.SUCCEEDED);
+ when(mockJob.finished(JobStateInternal.KILLED)).thenReturn(
+ JobStateInternal.KILLED);
+ when(mockJob.finished(JobStateInternal.FAILED)).thenReturn(
+ JobStateInternal.FAILED);
+ when(mockJob.finished(JobStateInternal.SUCCEEDED)).thenReturn(
+ JobStateInternal.SUCCEEDED);
try {
doThrow(new IOException()).when(mockCommitter).commitJob(any(JobContext.class));
@@ -106,11 +109,11 @@ public class TestJobImpl {
// commitJob stubbed out, so this can't happen
}
doNothing().when(mockEventHandler).handle(any(JobHistoryEvent.class));
- JobState jobState = JobImpl.checkJobCompleteSuccess(mockJob);
+ JobStateInternal jobState = JobImpl.checkJobCompleteSuccess(mockJob);
Assert.assertNotNull("checkJobCompleteSuccess incorrectly returns null " +
"for successful job", jobState);
Assert.assertEquals("checkJobCompleteSuccess returns incorrect state",
- JobState.FAILED, jobState);
+ JobStateInternal.FAILED, jobState);
verify(mockJob).abortJob(
eq(org.apache.hadoop.mapreduce.JobStatus.State.FAILED));
}
@@ -129,7 +132,8 @@ public class TestJobImpl {
when(mockJob.getJobContext()).thenReturn(mockJobContext);
doNothing().when(mockJob).setFinishTime();
doNothing().when(mockJob).logJobHistoryFinishedEvent();
- when(mockJob.finished(any(JobState.class))).thenReturn(JobState.SUCCEEDED);
+ when(mockJob.finished(any(JobStateInternal.class))).thenReturn(
+ JobStateInternal.SUCCEEDED);
try {
doNothing().when(mockCommitter).commitJob(any(JobContext.class));
@@ -141,7 +145,7 @@ public class TestJobImpl {
"for successful job",
JobImpl.checkJobCompleteSuccess(mockJob));
Assert.assertEquals("checkJobCompleteSuccess returns incorrect state",
- JobState.SUCCEEDED, JobImpl.checkJobCompleteSuccess(mockJob));
+ JobStateInternal.SUCCEEDED, JobImpl.checkJobCompleteSuccess(mockJob));
}
@Test
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java Fri Oct 19 18:49:38 2012
@@ -26,7 +26,6 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -48,13 +47,13 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.SystemClock;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -338,7 +337,7 @@ public class TestTaskImpl {
* {@link TaskState#KILL_WAIT}
*/
private void assertTaskKillWaitState() {
- assertEquals(TaskState.KILL_WAIT, mockTask.getState());
+ assertEquals(TaskStateInternal.KILL_WAIT, mockTask.getInternalState());
}
/**
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java Fri Oct 19 18:49:38 2012
@@ -46,6 +46,8 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.api.ContainerManager;
@@ -260,7 +262,8 @@ public class TestContainerLauncher {
attempts.size());
TaskAttempt attempt = attempts.values().iterator().next();
- app.waitForState(attempt, TaskAttemptState.ASSIGNED);
+ app.waitForInternalState((TaskAttemptImpl) attempt,
+ TaskAttemptStateInternal.ASSIGNED);
app.waitForState(job, JobState.FAILED);
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Fri Oct 19 18:49:38 2012
@@ -128,14 +128,26 @@ public class TypeConverter {
return taskId;
}
- public static TaskAttemptState toYarn(org.apache.hadoop.mapred.TaskStatus.State state) {
- if (state == org.apache.hadoop.mapred.TaskStatus.State.KILLED_UNCLEAN) {
- return TaskAttemptState.KILLED;
- }
- if (state == org.apache.hadoop.mapred.TaskStatus.State.FAILED_UNCLEAN) {
+ public static TaskAttemptState toYarn(
+ org.apache.hadoop.mapred.TaskStatus.State state) {
+ switch (state) {
+ case COMMIT_PENDING:
+ return TaskAttemptState.COMMIT_PENDING;
+ case FAILED:
+ case FAILED_UNCLEAN:
return TaskAttemptState.FAILED;
+ case KILLED:
+ case KILLED_UNCLEAN:
+ return TaskAttemptState.KILLED;
+ case RUNNING:
+ return TaskAttemptState.RUNNING;
+ case SUCCEEDED:
+ return TaskAttemptState.SUCCEEDED;
+ case UNASSIGNED:
+ return TaskAttemptState.STARTING;
+ default:
+ throw new YarnException("Unrecognized State: " + state);
}
- return TaskAttemptState.valueOf(state.toString());
}
public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) {
@@ -309,7 +321,6 @@ public class TypeConverter {
return org.apache.hadoop.mapred.JobStatus.PREP;
case RUNNING:
return org.apache.hadoop.mapred.JobStatus.RUNNING;
- case KILL_WAIT:
case KILLED:
return org.apache.hadoop.mapred.JobStatus.KILLED;
case SUCCEEDED:
@@ -329,7 +340,6 @@ public class TypeConverter {
return org.apache.hadoop.mapred.TIPStatus.PENDING;
case RUNNING:
return org.apache.hadoop.mapred.TIPStatus.RUNNING;
- case KILL_WAIT:
case KILLED:
return org.apache.hadoop.mapred.TIPStatus.KILLED;
case SUCCEEDED:
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobState.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobState.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobState.java Fri Oct 19 18:49:38 2012
@@ -24,7 +24,6 @@ public enum JobState {
RUNNING,
SUCCEEDED,
FAILED,
- KILL_WAIT,
KILLED,
ERROR
}
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptState.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptState.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptState.java Fri Oct 19 18:49:38 2012
@@ -20,16 +20,10 @@ package org.apache.hadoop.mapreduce.v2.a
public enum TaskAttemptState {
NEW,
- UNASSIGNED,
- ASSIGNED,
+ STARTING,
RUNNING,
- COMMIT_PENDING,
- SUCCESS_CONTAINER_CLEANUP,
- SUCCEEDED,
- FAIL_CONTAINER_CLEANUP,
- FAIL_TASK_CLEANUP,
- FAILED,
- KILL_CONTAINER_CLEANUP,
- KILL_TASK_CLEANUP,
+ COMMIT_PENDING,
+ SUCCEEDED,
+ FAILED,
KILLED
}
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskState.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskState.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskState.java Fri Oct 19 18:49:38 2012
@@ -19,5 +19,5 @@
package org.apache.hadoop.mapreduce.v2.api.records;
public enum TaskState {
- NEW, SCHEDULED, RUNNING, SUCCEEDED, FAILED, KILL_WAIT, KILLED
+ NEW, SCHEDULED, RUNNING, SUCCEEDED, FAILED, KILLED
}
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Fri Oct 19 18:49:38 2012
@@ -49,8 +49,8 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.yarn.ContainerLogAppender;
import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
@@ -100,15 +100,10 @@ public class MRApps extends Apps {
public static enum TaskAttemptStateUI {
NEW(
new TaskAttemptState[] { TaskAttemptState.NEW,
- TaskAttemptState.UNASSIGNED, TaskAttemptState.ASSIGNED }),
+ TaskAttemptState.STARTING }),
RUNNING(
new TaskAttemptState[] { TaskAttemptState.RUNNING,
- TaskAttemptState.COMMIT_PENDING,
- TaskAttemptState.SUCCESS_CONTAINER_CLEANUP,
- TaskAttemptState.FAIL_CONTAINER_CLEANUP,
- TaskAttemptState.FAIL_TASK_CLEANUP,
- TaskAttemptState.KILL_CONTAINER_CLEANUP,
- TaskAttemptState.KILL_TASK_CLEANUP }),
+ TaskAttemptState.COMMIT_PENDING }),
SUCCESSFUL(new TaskAttemptState[] { TaskAttemptState.SUCCEEDED}),
FAILED(new TaskAttemptState[] { TaskAttemptState.FAILED}),
KILLED(new TaskAttemptState[] { TaskAttemptState.KILLED});
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto Fri Oct 19 18:49:38 2012
@@ -50,8 +50,7 @@ enum TaskStateProto {
TS_RUNNING = 3;
TS_SUCCEEDED = 4;
TS_FAILED = 5;
- TS_KILL_WAIT = 6;
- TS_KILLED = 7;
+ TS_KILLED = 6;
}
enum PhaseProto {
@@ -93,18 +92,12 @@ message TaskReportProto {
enum TaskAttemptStateProto {
TA_NEW = 1;
- TA_UNASSIGNED = 2;
- TA_ASSIGNED = 3;
- TA_RUNNING = 4;
- TA_COMMIT_PENDING = 5;
- TA_SUCCESS_CONTAINER_CLEANUP = 6;
- TA_SUCCEEDED = 7;
- TA_FAIL_CONTAINER_CLEANUP = 8;
- TA_FAIL_TASK_CLEANUP = 9;
- TA_FAILED = 10;
- TA_KILL_CONTAINER_CLEANUP = 11;
- TA_KILL_TASK_CLEANUP = 12;
- TA_KILLED = 13;
+ TA_STARTING = 2;
+ TA_RUNNING = 3;
+ TA_COMMIT_PENDING = 4;
+ TA_SUCCEEDED = 5;
+ TA_FAILED = 6;
+ TA_KILLED = 7;
}
message TaskAttemptReportProto {
@@ -131,9 +124,8 @@ enum JobStateProto {
J_RUNNING = 3;
J_SUCCEEDED = 4;
J_FAILED = 5;
- J_KILL_WAIT = 6;
- J_KILLED = 7;
- J_ERROR = 8;
+ J_KILLED = 6;
+ J_ERROR = 7;
}
message JobReportProto {
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java Fri Oct 19 18:49:38 2012
@@ -519,6 +519,8 @@ public class ConfigUtil {
new String[] {
MRJobConfig.MR_AM_SECURITY_SERVICE_AUTHORIZATION_CLIENT
});
+ Configuration.addDeprecation("mapreduce.user.classpath.first",
+ MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST);
}
public static void main(String[] args) {
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java Fri Oct 19 18:49:38 2012
@@ -122,7 +122,12 @@ public class CompletedTask implements Ta
loadAllTaskAttempts();
this.report = Records.newRecord(TaskReport.class);
report.setTaskId(taskId);
- report.setStartTime(taskInfo.getStartTime());
+ long minLaunchTime = Long.MAX_VALUE;
+ for(TaskAttempt attempt: attempts.values()) {
+ minLaunchTime = Math.min(minLaunchTime, attempt.getLaunchTime());
+ }
+ minLaunchTime = minLaunchTime == Long.MAX_VALUE ? -1 : minLaunchTime;
+ report.setStartTime(minLaunchTime);
report.setFinishTime(taskInfo.getFinishTime());
report.setTaskState(getState());
report.setProgress(getProgress());
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java Fri Oct 19 18:49:38 2012
@@ -672,37 +672,38 @@ public class TestDFSIO implements Tool {
return -1;
}
- for (int i = 0; i < args.length; i++) { // parse command line
- if (args[i].startsWith("-read")) {
+ for (int i = 0; i < args.length; i++) { // parse command line
+ if (args[i].toLowerCase().startsWith("-read")) {
testType = TestType.TEST_TYPE_READ;
- } else if (args[i].equals("-write")) {
+ } else if (args[i].equalsIgnoreCase("-write")) {
testType = TestType.TEST_TYPE_WRITE;
- } else if (args[i].equals("-append")) {
+ } else if (args[i].equalsIgnoreCase("-append")) {
testType = TestType.TEST_TYPE_APPEND;
- } else if (args[i].equals("-random")) {
- if(testType != TestType.TEST_TYPE_READ) return -1;
+ } else if (args[i].equalsIgnoreCase("-random")) {
+ if (testType != TestType.TEST_TYPE_READ) return -1;
testType = TestType.TEST_TYPE_READ_RANDOM;
- } else if (args[i].equals("-backward")) {
- if(testType != TestType.TEST_TYPE_READ) return -1;
+ } else if (args[i].equalsIgnoreCase("-backward")) {
+ if (testType != TestType.TEST_TYPE_READ) return -1;
testType = TestType.TEST_TYPE_READ_BACKWARD;
- } else if (args[i].equals("-skip")) {
- if(testType != TestType.TEST_TYPE_READ) return -1;
+ } else if (args[i].equalsIgnoreCase("-skip")) {
+ if (testType != TestType.TEST_TYPE_READ) return -1;
testType = TestType.TEST_TYPE_READ_SKIP;
- } else if (args[i].equals("-clean")) {
+ } else if (args[i].equalsIgnoreCase("-clean")) {
testType = TestType.TEST_TYPE_CLEANUP;
- } else if (args[i].startsWith("-seq")) {
+ } else if (args[i].toLowerCase().startsWith("-seq")) {
isSequential = true;
- } else if (args[i].startsWith("-compression")) {
+ } else if (args[i].toLowerCase().startsWith("-compression")) {
compressionClass = args[++i];
- } else if (args[i].equals("-nrFiles")) {
+ } else if (args[i].equalsIgnoreCase("-nrfiles")) {
nrFiles = Integer.parseInt(args[++i]);
- } else if (args[i].equals("-fileSize") || args[i].equals("-size")) {
+ } else if (args[i].equalsIgnoreCase("-filesize")
+ || args[i].equalsIgnoreCase("-size")) {
nrBytes = parseSize(args[++i]);
- } else if (args[i].equals("-skipSize")) {
+ } else if (args[i].equalsIgnoreCase("-skipsize")) {
skipSize = parseSize(args[++i]);
- } else if (args[i].equals("-bufferSize")) {
+ } else if (args[i].equalsIgnoreCase("-buffersize")) {
bufferSize = Integer.parseInt(args[++i]);
- } else if (args[i].equals("-resFile")) {
+ } else if (args[i].equalsIgnoreCase("-resfile")) {
resFileName = args[++i];
} else {
System.err.println("Illegal argument: " + args[i]);
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java?rev=1400219&r1=1400218&r2=1400219&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java Fri Oct 19 18:49:38 2012
@@ -317,7 +317,7 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test0): " + split);
}
- assertEquals(splits.size(), 1);
+ assertEquals(1, splits.size());
CombineFileSplit fileSplit = (CombineFileSplit) splits.get(0);
assertEquals(2, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
@@ -347,24 +347,24 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test1): " + split);
}
- assertEquals(splits.size(), 2);
+ assertEquals(2, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file1.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r2
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
// create another file on 3 datanodes and 3 racks.
dfs.startDataNodes(conf, 1, true, null, rack3, hosts3, null);
@@ -378,37 +378,37 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test2): " + split);
}
- assertEquals(splits.size(), 3);
+ assertEquals(3, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 3);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file1.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
+ assertEquals(3, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file3.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r3
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r2
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
// create file4 on all three racks
Path file4 = new Path(dir4 + "/file4");
@@ -420,37 +420,37 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test3): " + split);
}
- assertEquals(splits.size(), 3);
+ assertEquals(3, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 6);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file1.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
+ assertEquals(6, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file3.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r3
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r2
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
// maximum split size is 2 blocks
inFormat = new DummyInputFormat();
@@ -462,35 +462,35 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test4): " + split);
}
- assertEquals(splits.size(), 5);
+ assertEquals(5, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(1), 0);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(0), BLOCKSIZE);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(1), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file4.getName(), fileSplit.getPath(1).getName());
+ assertEquals(0, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file4.getName(), fileSplit.getPath(0).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file4.getName(), fileSplit.getPath(1).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
// maximum split size is 3 blocks
inFormat = new DummyInputFormat();
@@ -502,48 +502,48 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test5): " + split);
}
- assertEquals(splits.size(), 4);
+ assertEquals(4, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 3);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getPath(0).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host2.rack2.com");
+ assertEquals(3, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file3.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(file4.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file4.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file4.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals("host2.rack2.com", fileSplit.getLocations()[0]);
fileSplit = (CombineFileSplit) splits.get(3);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file1.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host1.rack1.com");
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals("host1.rack1.com", fileSplit.getLocations()[0]);
// maximum split size is 4 blocks
inFormat = new DummyInputFormat();
@@ -553,42 +553,42 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test6): " + split);
}
- assertEquals(splits.size(), 3);
+ assertEquals(3, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 4);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file3.getName());
- assertEquals(fileSplit.getOffset(2), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 4);
- assertEquals(fileSplit.getPath(0).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getPath(1).getName(), file2.getName());
- assertEquals(fileSplit.getOffset(1), BLOCKSIZE);
- assertEquals(fileSplit.getLength(1), BLOCKSIZE);
- assertEquals(fileSplit.getPath(2).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(2), BLOCKSIZE);
- assertEquals(fileSplit.getLength(2), BLOCKSIZE);
- assertEquals(fileSplit.getPath(3).getName(), file4.getName());
- assertEquals(fileSplit.getOffset(3), 2 * BLOCKSIZE);
- assertEquals(fileSplit.getLength(3), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], "host2.rack2.com");
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getPath(0).getName(), file1.getName());
- assertEquals(fileSplit.getOffset(0), 0);
- assertEquals(fileSplit.getLength(0), BLOCKSIZE);
- assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
+ assertEquals(4, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file3.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file3.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file3.getName(), fileSplit.getPath(2).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(4, fileSplit.getNumPaths());
+ assertEquals(file2.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(file2.getName(), fileSplit.getPath(1).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(1));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(1));
+ assertEquals(file4.getName(), fileSplit.getPath(2).getName());
+ assertEquals(BLOCKSIZE, fileSplit.getOffset(2));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(2));
+ assertEquals(file4.getName(), fileSplit.getPath(3).getName());
+ assertEquals(2 * BLOCKSIZE, fileSplit.getOffset(3));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(3));
+ assertEquals("host2.rack2.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(file1.getName(), fileSplit.getPath(0).getName());
+ assertEquals(0, fileSplit.getOffset(0));
+ assertEquals(BLOCKSIZE, fileSplit.getLength(0));
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
// maximum split size is 7 blocks and min is 3 blocks
inFormat = new DummyInputFormat();
@@ -601,15 +601,15 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test7): " + split);
}
- assertEquals(splits.size(), 2);
+ assertEquals(2, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 6);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], "host3.rack3.com");
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 3);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], "host1.rack1.com");
+ assertEquals(6, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals("host3.rack3.com", fileSplit.getLocations()[0]);
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(3, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals("host1.rack1.com", fileSplit.getLocations()[0]);
// Rack 1 has file1, file2 and file3 and file4
// Rack 2 has file2 and file3 and file4
@@ -624,19 +624,19 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test1): " + split);
}
- assertEquals(splits.size(), 3);
+ assertEquals(3, splits.size());
fileSplit = (CombineFileSplit) splits.get(0);
- assertEquals(fileSplit.getNumPaths(), 2);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], hosts2[0]); // should be on r2
- fileSplit = (CombineFileSplit) splits.get(1);
- assertEquals(fileSplit.getNumPaths(), 1);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], hosts1[0]); // should be on r1
- fileSplit = (CombineFileSplit) splits.get(2);
- assertEquals(fileSplit.getNumPaths(), 6);
- assertEquals(fileSplit.getLocations().length, 1);
- assertEquals(fileSplit.getLocations()[0], hosts3[0]); // should be on r3
+ assertEquals(2, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(hosts2[0], fileSplit.getLocations()[0]); // should be on r2
+ fileSplit = (CombineFileSplit) splits.get(1);
+ assertEquals(1, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(hosts1[0], fileSplit.getLocations()[0]); // should be on r1
+ fileSplit = (CombineFileSplit) splits.get(2);
+ assertEquals(6, fileSplit.getNumPaths());
+ assertEquals(1, fileSplit.getLocations().length);
+ assertEquals(hosts3[0], fileSplit.getLocations()[0]); // should be on r3
// measure performance when there are multiple pools and
// many files in each pool.
@@ -669,7 +669,7 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test8): " + split);
}
- assertEquals(6, splits.size());
+ assertEquals(6, splits.size());
} finally {
if (dfs != null) {
@@ -750,7 +750,7 @@ public class TestCombineFileInputFormat
for (InputSplit split : splits) {
System.out.println("File split(Test0): " + split);
}
- assertEquals(splits.size(), 1);
+ assertEquals(1, splits.size());
CombineFileSplit fileSplit = (CombineFileSplit) splits.get(0);
assertEquals(2, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
@@ -1135,7 +1135,7 @@ public class TestCombineFileInputFormat
Job job = Job.getInstance(conf);
FileInputFormat.setInputPaths(job, "test");
List<InputSplit> splits = inFormat.getSplits(job);
- assertEquals(splits.size(), 1);
+ assertEquals(1, splits.size());
CombineFileSplit fileSplit = (CombineFileSplit) splits.get(0);
assertEquals(1, fileSplit.getNumPaths());
assertEquals(file.getName(), fileSplit.getPath(0).getName());