Posted to commits@accumulo.apache.org by ct...@apache.org on 2019/07/30 21:24:52 UTC

[accumulo-testing] branch master updated (b3a3823 -> 237c0c5)

This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git.


    from b3a3823  Use non blocking secure random for seeding. (#106)
     new 2b7b28f  Revert to Java 8 for accumulo-testing
     new 237c0c5  Fix warnings and clean up pom.xml

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 pom.xml                                            | 117 ++++++++++++++++-----
 .../testing/continuous/ContinuousMoru.java         |  59 ++++++-----
 .../testing/continuous/ContinuousVerify.java       | 104 +++++++++---------
 .../testing/ingest/BulkImportDirectory.java        |   1 +
 .../accumulo/testing/ingest/VerifyIngest.java      |   5 +-
 .../apache/accumulo/testing/mapreduce/RowHash.java |  60 ++++++-----
 .../accumulo/testing/mapreduce/TeraSortIngest.java |  62 +++++------
 .../performance/tests/ConditionalMutationsPT.java  |  27 +++--
 .../testing/performance/tests/ScanExecutorPT.java  |   3 +-
 .../performance/tests/YieldingScanExecutorPT.java  |   2 +-
 .../testing/randomwalk/bulk/BulkPlusOne.java       |  13 +--
 .../testing/randomwalk/concurrent/BulkImport.java  |   4 +-
 .../testing/randomwalk/concurrent/Config.java      |   4 +-
 .../randomwalk/security/SecurityHelper.java        |   1 -
 .../testing/randomwalk/security/TableOp.java       |   5 +-
 .../testing/randomwalk/shard/BulkInsert.java       |   1 +
 .../testing/randomwalk/shard/SortTool.java         |  15 +--
 17 files changed, 275 insertions(+), 208 deletions(-)


[accumulo-testing] 01/02: Revert to Java 8 for accumulo-testing

Posted by ct...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git

commit 2b7b28f73cac698e1724ad60311d2002fb4e02ae
Author: Christopher Tubbs <ct...@apache.org>
AuthorDate: Tue Jul 30 16:25:46 2019 -0400

    Revert to Java 8 for accumulo-testing
    
    Allow accumulo-testing to build and run with Java 8, but support the
    release flag on JDK9 and later.
---
 pom.xml | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/pom.xml b/pom.xml
index 1c02093..79463d0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -31,9 +31,8 @@
     <accumulo.version>2.0.0-SNAPSHOT</accumulo.version>
     <eclipseFormatterStyle>${project.basedir}/contrib/Eclipse-Accumulo-Codestyle.xml</eclipseFormatterStyle>
     <hadoop.version>3.2.0</hadoop.version>
-    <maven.compiler.release>11</maven.compiler.release>
-    <maven.compiler.source>11</maven.compiler.source>
-    <maven.compiler.target>11</maven.compiler.target>
+    <maven.compiler.source>1.8</maven.compiler.source>
+    <maven.compiler.target>1.8</maven.compiler.target>
     <slf4j.version>1.7.25</slf4j.version>
     <spotbugs.version>3.1.7</spotbugs.version>
     <zookeeper.version>3.4.14</zookeeper.version>
@@ -351,5 +350,14 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>jdk-release-flag</id>
+      <activation>
+        <jdk>[9,)</jdk>
+      </activation>
+      <properties>
+        <maven.compiler.release>8</maven.compiler.release>
+      </properties>
+    </profile>
   </profiles>
 </project>
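
For context on why this profile matters: with only source/target set to 1.8, a
build on JDK 9+ still compiles against the newer platform class library, so a
call like the one sketched below compiles cleanly and then fails on a Java 8
runtime; with maven.compiler.release set to 8 it is rejected at compile time.
The sketch is illustrative only and is not part of the commit.

    // Illustrative only: String.isBlank() was added in Java 11.
    // Compiled with -source/-target 1.8 on JDK 11, this builds but throws
    // NoSuchMethodError on a Java 8 JVM; with --release 8 it fails to compile.
    public class ReleaseFlagDemo {
        public static void main(String[] args) {
            System.out.println(" ".isBlank()); // prints true on Java 11+
        }
    }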


[accumulo-testing] 02/02: Fix warnings and clean up pom.xml

Posted by ct...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git

commit 237c0c5a952adcef7f1aad29dcb243c602cadf54
Author: Christopher Tubbs <ct...@apache.org>
AuthorDate: Tue Jul 30 17:19:01 2019 -0400

    Fix warnings and clean up pom.xml
    
    POM updates:
    * Update plugins
    * Add m2e profile to ensure compiler compliance in Eclipse and to ignore
      plugins that Eclipse doesn't understand
    * Require a minimum Java version of 1.8 and Maven 3.5
    * Remove unneeded plugins (felix bundle and exec-maven-plugin)
    
    Fix warnings in code:
    * Use try-with-resources for TestEnv
    * Use Double.parseDouble instead of new Double
    * Compute iterable sizes from scanners without unused variables
    * Suppress deprecation warnings where needed
    * Update BulkPlusOne RW test to use new bulk import API
    * Add missing case statement for GET_SUMMARIES in TableOp
    * Remove unused variables
---
 pom.xml                                            | 103 +++++++++++++++-----
 .../testing/continuous/ContinuousMoru.java         |  59 ++++++------
 .../testing/continuous/ContinuousVerify.java       | 104 +++++++++++----------
 .../testing/ingest/BulkImportDirectory.java        |   1 +
 .../accumulo/testing/ingest/VerifyIngest.java      |   5 +-
 .../apache/accumulo/testing/mapreduce/RowHash.java |  60 ++++++------
 .../accumulo/testing/mapreduce/TeraSortIngest.java |  62 ++++++------
 .../performance/tests/ConditionalMutationsPT.java  |  27 +++---
 .../testing/performance/tests/ScanExecutorPT.java  |   3 +-
 .../performance/tests/YieldingScanExecutorPT.java  |   2 +-
 .../testing/randomwalk/bulk/BulkPlusOne.java       |  13 +--
 .../testing/randomwalk/concurrent/BulkImport.java  |   4 +-
 .../testing/randomwalk/concurrent/Config.java      |   4 +-
 .../randomwalk/security/SecurityHelper.java        |   1 -
 .../testing/randomwalk/security/TableOp.java       |   5 +-
 .../testing/randomwalk/shard/BulkInsert.java       |   1 +
 .../testing/randomwalk/shard/SortTool.java         |  15 +--
 17 files changed, 264 insertions(+), 205 deletions(-)

diff --git a/pom.xml b/pom.xml
index 79463d0..9f53552 100644
--- a/pom.xml
+++ b/pom.xml
@@ -126,15 +126,19 @@
     <pluginManagement>
       <plugins>
         <plugin>
-          <!-- Allows us to get the apache-ds bundle artifacts -->
-          <groupId>org.apache.felix</groupId>
-          <artifactId>maven-bundle-plugin</artifactId>
-          <version>3.0.1</version>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>3.8.1</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>3.2.1</version>
         </plugin>
         <plugin>
           <groupId>com.github.ekryd.sortpom</groupId>
           <artifactId>sortpom-maven-plugin</artifactId>
-          <version>2.8.0</version>
+          <version>2.10.0</version>
           <configuration>
             <predefinedSortOrder>recommended_2008_06</predefinedSortOrder>
             <createBackupFile>false</createBackupFile>
@@ -149,7 +153,7 @@
         <plugin>
           <groupId>net.revelc.code.formatter</groupId>
           <artifactId>formatter-maven-plugin</artifactId>
-          <version>2.8.1</version>
+          <version>2.10.0</version>
           <configuration>
             <configFile>${eclipseFormatterStyle}</configFile>
             <compilerCompliance>${maven.compiler.source}</compilerCompliance>
@@ -171,7 +175,7 @@
         <plugin>
           <groupId>net.revelc.code</groupId>
           <artifactId>impsort-maven-plugin</artifactId>
-          <version>1.2.0</version>
+          <version>1.3.0</version>
           <configuration>
             <removeUnused>true</removeUnused>
             <groups>java.,javax.,org.,com.</groups>
@@ -185,6 +189,30 @@
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <!-- must be same id as in the apache parent pom, to override the version -->
+            <id>enforce-maven-version</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <phase>validate</phase>
+            <configuration>
+              <rules>
+                <requireMavenVersion>
+                  <version>[3.5.0,)</version>
+                </requireMavenVersion>
+                <requireJavaVersion>
+                  <version>[${maven.compiler.target},)</version>
+                </requireJavaVersion>
+              </rules>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
         <configuration>
           <optimize>true</optimize>
@@ -201,13 +229,6 @@
         </configuration>
       </plugin>
       <plugin>
-        <!-- Allows us to get the apache-ds bundle artifacts -->
-        <groupId>org.apache.felix</groupId>
-        <artifactId>maven-bundle-plugin</artifactId>
-        <extensions>true</extensions>
-        <inherited>true</inherited>
-      </plugin>
-      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
         <executions>
@@ -221,18 +242,10 @@
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
-        <version>1.5.0</version>
-        <configuration>
-          <cleanupDaemonThreads>false</cleanupDaemonThreads>
-        </configuration>
-      </plugin>
-      <plugin>
         <!-- This was added to ensure project only uses public API -->
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-checkstyle-plugin</artifactId>
-        <version>3.0.0</version>
+        <version>3.1.0</version>
         <configuration>
           <configLocation>contrib/checkstyle.xml</configLocation>
         </configuration>
@@ -240,7 +253,7 @@
           <dependency>
             <groupId>com.puppycrawl.tools</groupId>
             <artifactId>checkstyle</artifactId>
-            <version>8.18</version>
+            <version>8.23</version>
           </dependency>
         </dependencies>
         <executions>
@@ -351,6 +364,48 @@
       </build>
     </profile>
     <profile>
+      <id>m2e</id>
+      <activation>
+        <property>
+          <name>m2e.version</name>
+        </property>
+      </activation>
+      <properties>
+        <maven.compiler.release>8</maven.compiler.release>
+      </properties>
+      <build>
+        <pluginManagement>
+          <plugins>
+            <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+            <plugin>
+              <groupId>org.eclipse.m2e</groupId>
+              <artifactId>lifecycle-mapping</artifactId>
+              <version>1.0.0</version>
+              <configuration>
+                <lifecycleMappingMetadata>
+                  <pluginExecutions>
+                    <pluginExecution>
+                      <pluginExecutionFilter>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-remote-resources-plugin</artifactId>
+                        <versionRange>[0,)</versionRange>
+                        <goals>
+                          <goal>process</goal>
+                        </goals>
+                      </pluginExecutionFilter>
+                      <action>
+                        <ignore />
+                      </action>
+                    </pluginExecution>
+                  </pluginExecutions>
+                </lifecycleMappingMetadata>
+              </configuration>
+            </plugin>
+          </plugins>
+        </pluginManagement>
+      </build>
+    </profile>
+    <profile>
       <id>jdk-release-flag</id>
       <activation>
         <jdk>[9,)</jdk>
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
index c7d102c..99e2fdf 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
@@ -112,44 +112,45 @@ public class ContinuousMoru extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
 
-    ContinuousEnv env = new ContinuousEnv(args);
+      Job job = Job.getInstance(getConf(),
+          this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+      job.setJarByClass(this.getClass());
+      job.setInputFormatClass(AccumuloInputFormat.class);
 
-    Job job = Job.getInstance(getConf(),
-        this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
-    job.setJarByClass(this.getClass());
-    job.setInputFormatClass(AccumuloInputFormat.class);
+      int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
+      Set<Range> ranges = env.getAccumuloClient().tableOperations()
+          .splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
 
-    int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
-    Set<Range> ranges = env.getAccumuloClient().tableOperations()
-        .splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
+      AccumuloInputFormat.configure().clientProperties(env.getClientProps())
+          .table(env.getAccumuloTableName()).ranges(ranges).autoAdjustRanges(false).store(job);
 
-    AccumuloInputFormat.configure().clientProperties(env.getClientProps())
-        .table(env.getAccumuloTableName()).ranges(ranges).autoAdjustRanges(false).store(job);
+      job.setMapperClass(CMapper.class);
+      job.setNumReduceTasks(0);
+      job.setOutputFormatClass(AccumuloOutputFormat.class);
 
-    job.setMapperClass(CMapper.class);
-    job.setNumReduceTasks(0);
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
+      AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
+          .defaultTable(env.getAccumuloTableName()).store(job);
 
-    AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
-        .defaultTable(env.getAccumuloTableName()).store(job);
+      Configuration conf = job.getConfiguration();
+      conf.setLong(MIN, env.getRowMin());
+      conf.setLong(MAX, env.getRowMax());
+      conf.setInt(MAX_CF, env.getMaxColF());
+      conf.setInt(MAX_CQ, env.getMaxColQ());
+      conf.set(CI_ID, UUID.randomUUID().toString());
+      conf.set("mapreduce.job.classloader", "true");
 
-    Configuration conf = job.getConfiguration();
-    conf.setLong(MIN, env.getRowMin());
-    conf.setLong(MAX, env.getRowMax());
-    conf.setInt(MAX_CF, env.getMaxColF());
-    conf.setInt(MAX_CQ, env.getMaxColQ());
-    conf.set(CI_ID, UUID.randomUUID().toString());
-    conf.set("mapreduce.job.classloader", "true");
-
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
+      job.waitForCompletion(true);
+      return job.isSuccessful() ? 0 : 1;
+    }
   }
 
   public static void main(String[] args) throws Exception {
-    ContinuousEnv env = new ContinuousEnv(args);
-    int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousMoru(), args);
-    if (res != 0)
-      System.exit(res);
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
+      int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousMoru(), args);
+      if (res != 0)
+        System.exit(res);
+    }
   }
 }
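
The conversion above relies on ContinuousEnv (via TestEnv) being AutoCloseable,
so the environment is released however the block exits. A minimal sketch of the
pattern, using a hypothetical resource class rather than the real TestEnv:

    // Hypothetical stand-in for TestEnv/ContinuousEnv:
    class ExampleEnv implements AutoCloseable {
        @Override
        public void close() {
            System.out.println("resources released");
        }
    }

    public class TryWithResourcesDemo {
        public static void main(String[] args) {
            try (ExampleEnv env = new ExampleEnv()) {
                // close() runs automatically when this block exits,
                // whether normally or via an exception
            }
        }
    }
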
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
index 14015e3..5db4350 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
@@ -140,69 +140,71 @@ public class ContinuousVerify extends Configured implements Tool {
   @Override
   public int run(String[] args) throws Exception {
 
-    ContinuousEnv env = new ContinuousEnv(args);
-
-    String tableName = env.getAccumuloTableName();
-
-    Job job = Job.getInstance(getConf(),
-        this.getClass().getSimpleName() + "_" + tableName + "_" + System.currentTimeMillis());
-    job.setJarByClass(this.getClass());
-
-    job.setInputFormatClass(AccumuloInputFormat.class);
-
-    boolean scanOffline = Boolean
-        .parseBoolean(env.getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
-    int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
-    int reducers = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_REDUCERS));
-    String outputDir = env.getTestProperty(TestProps.CI_VERIFY_OUTPUT_DIR);
-
-    Set<Range> ranges;
-    String clone = "";
-    AccumuloClient client = env.getAccumuloClient();
-    String table;
-
-    if (scanOffline) {
-      Random random = new Random();
-      clone = tableName + "_" + String.format("%016x", (random.nextLong() & 0x7fffffffffffffffL));
-      client.tableOperations().clone(tableName, clone, true, new HashMap<>(), new HashSet<>());
-      ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
-      client.tableOperations().offline(clone);
-      table = clone;
-    } else {
-      ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
-      table = tableName;
-    }
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
+
+      String tableName = env.getAccumuloTableName();
+
+      Job job = Job.getInstance(getConf(),
+          this.getClass().getSimpleName() + "_" + tableName + "_" + System.currentTimeMillis());
+      job.setJarByClass(this.getClass());
+
+      job.setInputFormatClass(AccumuloInputFormat.class);
+
+      boolean scanOffline = Boolean
+          .parseBoolean(env.getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
+      int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
+      int reducers = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_REDUCERS));
+      String outputDir = env.getTestProperty(TestProps.CI_VERIFY_OUTPUT_DIR);
+
+      Set<Range> ranges;
+      String clone = "";
+      AccumuloClient client = env.getAccumuloClient();
+      String table;
+
+      if (scanOffline) {
+        Random random = new Random();
+        clone = tableName + "_" + String.format("%016x", (random.nextLong() & 0x7fffffffffffffffL));
+        client.tableOperations().clone(tableName, clone, true, new HashMap<>(), new HashSet<>());
+        ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
+        client.tableOperations().offline(clone);
+        table = clone;
+      } else {
+        ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
+        table = tableName;
+      }
 
-    AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(table)
-        .ranges(ranges).autoAdjustRanges(false).offlineScan(scanOffline).store(job);
+      AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(table)
+          .ranges(ranges).autoAdjustRanges(false).offlineScan(scanOffline).store(job);
 
-    job.setMapperClass(CMapper.class);
-    job.setMapOutputKeyClass(LongWritable.class);
-    job.setMapOutputValueClass(VLongWritable.class);
+      job.setMapperClass(CMapper.class);
+      job.setMapOutputKeyClass(LongWritable.class);
+      job.setMapOutputValueClass(VLongWritable.class);
 
-    job.setReducerClass(CReducer.class);
-    job.setNumReduceTasks(reducers);
+      job.setReducerClass(CReducer.class);
+      job.setNumReduceTasks(reducers);
 
-    job.setOutputFormatClass(TextOutputFormat.class);
+      job.setOutputFormatClass(TextOutputFormat.class);
 
-    job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", scanOffline);
-    job.getConfiguration().set("mapreduce.job.classloader", "true");
+      job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", scanOffline);
+      job.getConfiguration().set("mapreduce.job.classloader", "true");
 
-    TextOutputFormat.setOutputPath(job, new Path(outputDir));
+      TextOutputFormat.setOutputPath(job, new Path(outputDir));
 
-    job.waitForCompletion(true);
+      job.waitForCompletion(true);
 
-    if (scanOffline) {
-      client.tableOperations().delete(clone);
+      if (scanOffline) {
+        client.tableOperations().delete(clone);
+      }
+      return job.isSuccessful() ? 0 : 1;
     }
-    return job.isSuccessful() ? 0 : 1;
   }
 
   public static void main(String[] args) throws Exception {
-    ContinuousEnv env = new ContinuousEnv(args);
+    try (ContinuousEnv env = new ContinuousEnv(args)) {
 
-    int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousVerify(), args);
-    if (res != 0)
-      System.exit(res);
+      int res = ToolRunner.run(env.getHadoopConfiguration(), new ContinuousVerify(), args);
+      if (res != 0)
+        System.exit(res);
+    }
   }
 }
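
A small worked example of the clone-name generation kept in the offline-scan
branch above: masking the random long with 0x7fffffffffffffffL clears the sign
bit, so the %016x suffix is always a non-negative, fixed-width hex string.
Standalone sketch with a hypothetical table-name prefix, not from the commit:

    import java.util.Random;

    public class CloneNameDemo {
        public static void main(String[] args) {
            Random random = new Random();
            // Clearing the sign bit keeps the value non-negative, so the
            // formatted suffix is always exactly 16 hex digits, no '-' sign.
            long masked = random.nextLong() & 0x7fffffffffffffffL;
            System.out.println("ci" + "_" + String.format("%016x", masked));
        }
    }
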
diff --git a/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java b/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
index 2b3a2c3..2da2c62 100644
--- a/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
+++ b/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
@@ -41,6 +41,7 @@ public class BulkImportDirectory {
     String failures = null;
   }
 
+  @SuppressWarnings("deprecation")
   public static void main(String[] args)
       throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException {
     final FileSystem fs = FileSystem.get(new Configuration());
diff --git a/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java b/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
index 992a982..b783e8c 100644
--- a/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
@@ -62,10 +62,9 @@ public class VerifyIngest {
     opts.parseArgs(VerifyIngest.class.getName(), args);
     try (AccumuloClient client = Accumulo.newClient().from(opts.getClientProps()).build()) {
       if (opts.trace) {
-        String name = VerifyIngest.class.getSimpleName();
         /*
-         * DistributedTrace.enable(); Trace.on(name); Trace.data("cmdLine",
-         * Arrays.asList(args).toString());
+         * String name = VerifyIngest.class.getSimpleName(); DistributedTrace.enable();
+         * Trace.on(name); Trace.data("cmdLine", Arrays.asList(args).toString());
          */
       }
 
diff --git a/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java b/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
index 4722799..45e24f0 100644
--- a/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
+++ b/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
@@ -58,44 +58,46 @@ public class RowHash extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    TestEnv env = new TestEnv(args);
-    Job job = Job.getInstance(getConf());
-    job.setJobName(this.getClass().getName());
-    job.setJarByClass(this.getClass());
-    job.setInputFormatClass(AccumuloInputFormat.class);
+    try (TestEnv env = new TestEnv(args)) {
+      Job job = Job.getInstance(getConf());
+      job.setJobName(this.getClass().getName());
+      job.setJarByClass(this.getClass());
+      job.setInputFormatClass(AccumuloInputFormat.class);
 
-    Properties props = env.getTestProperties();
-    String col = props.getProperty(TestProps.ROWHASH_COLUMN);
-    int idx = col.indexOf(":");
-    Text cf = new Text(idx < 0 ? col : col.substring(0, idx));
-    Text cq = idx < 0 ? null : new Text(col.substring(idx + 1));
-    Collection<IteratorSetting.Column> cols = Collections.emptyList();
-    if (cf.getLength() > 0)
-      cols = Collections.singleton(new IteratorSetting.Column(cf, cq));
+      Properties props = env.getTestProperties();
+      String col = props.getProperty(TestProps.ROWHASH_COLUMN);
+      int idx = col.indexOf(":");
+      Text cf = new Text(idx < 0 ? col : col.substring(0, idx));
+      Text cq = idx < 0 ? null : new Text(col.substring(idx + 1));
+      Collection<IteratorSetting.Column> cols = Collections.emptyList();
+      if (cf.getLength() > 0)
+        cols = Collections.singleton(new IteratorSetting.Column(cf, cq));
 
-    String inputTable = props.getProperty(TestProps.ROWHASH_INPUT_TABLE);
-    String outputTable = props.getProperty(TestProps.ROWHASH_OUTPUT_TABLE);
+      String inputTable = props.getProperty(TestProps.ROWHASH_INPUT_TABLE);
+      String outputTable = props.getProperty(TestProps.ROWHASH_OUTPUT_TABLE);
 
-    AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(inputTable)
-        .fetchColumns(cols).store(job);
-    AccumuloOutputFormat.configure().clientProperties(env.getClientProps())
-        .defaultTable(outputTable).createTables(true).store(job);
+      AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(inputTable)
+          .fetchColumns(cols).store(job);
+      AccumuloOutputFormat.configure().clientProperties(env.getClientProps())
+          .defaultTable(outputTable).createTables(true).store(job);
 
-    job.getConfiguration().set("mapreduce.job.classloader", "true");
-    job.setMapperClass(HashDataMapper.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(Mutation.class);
+      job.getConfiguration().set("mapreduce.job.classloader", "true");
+      job.setMapperClass(HashDataMapper.class);
+      job.setMapOutputKeyClass(Text.class);
+      job.setMapOutputValueClass(Mutation.class);
 
-    job.setNumReduceTasks(0);
+      job.setNumReduceTasks(0);
 
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
+      job.setOutputFormatClass(AccumuloOutputFormat.class);
 
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
+      job.waitForCompletion(true);
+      return job.isSuccessful() ? 0 : 1;
+    }
   }
 
   public static void main(String[] args) throws Exception {
-    TestEnv env = new TestEnv(args);
-    ToolRunner.run(env.getHadoopConfiguration(), new RowHash(), args);
+    try (TestEnv env = new TestEnv(args)) {
+      ToolRunner.run(env.getHadoopConfiguration(), new RowHash(), args);
+    }
   }
 }
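
The column parsing retained above splits a "family:qualifier" property on the
first ':'; when there is no ':' the whole value is the family and the qualifier
stays null. A standalone sketch of that logic with hypothetical values:

    import org.apache.hadoop.io.Text;

    public class ColumnSpecDemo {
        public static void main(String[] args) {
            for (String col : new String[] {"cf:cq", "cfonly"}) {
                int idx = col.indexOf(":");
                Text cf = new Text(idx < 0 ? col : col.substring(0, idx));
                Text cq = idx < 0 ? null : new Text(col.substring(idx + 1));
                System.out.println(cf + " / " + cq); // "cf / cq", then "cfonly / null"
            }
        }
    }
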
diff --git a/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java b/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
index ee2b5d0..1961b93 100644
--- a/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
@@ -352,42 +352,44 @@ public class TeraSortIngest extends Configured implements Tool {
   }
 
   public static void main(String[] args) throws Exception {
-    TestEnv env = new TestEnv(args);
-    ToolRunner.run(env.getHadoopConfiguration(), new TeraSortIngest(), args);
+    try (TestEnv env = new TestEnv(args)) {
+      ToolRunner.run(env.getHadoopConfiguration(), new TeraSortIngest(), args);
+    }
   }
 
   @Override
   public int run(String[] args) throws Exception {
 
-    TestEnv env = new TestEnv(args);
-
-    Job job = Job.getInstance(getConf());
-    job.setJobName("TeraSortIngest");
-    job.setJarByClass(this.getClass());
-    job.setInputFormatClass(RangeInputFormat.class);
-    job.setMapperClass(SortGenMapper.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(Mutation.class);
-    job.setNumReduceTasks(0);
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
-
-    Properties testProps = env.getTestProperties();
-    String tableName = testProps.getProperty(TestProps.TERASORT_TABLE);
-    Objects.requireNonNull(tableName);
-
-    AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
-        .defaultTable(tableName).store(job);
-
-    Configuration conf = job.getConfiguration();
-    conf.set("mapreduce.job.classloader", "true");
-    for (Object keyObj : testProps.keySet()) {
-      String key = (String) keyObj;
-      if (key.startsWith(TestProps.TERASORT)) {
-        conf.set(key, testProps.getProperty(key));
+    try (TestEnv env = new TestEnv(args)) {
+
+      Job job = Job.getInstance(getConf());
+      job.setJobName("TeraSortIngest");
+      job.setJarByClass(this.getClass());
+      job.setInputFormatClass(RangeInputFormat.class);
+      job.setMapperClass(SortGenMapper.class);
+      job.setMapOutputKeyClass(Text.class);
+      job.setMapOutputValueClass(Mutation.class);
+      job.setNumReduceTasks(0);
+      job.setOutputFormatClass(AccumuloOutputFormat.class);
+
+      Properties testProps = env.getTestProperties();
+      String tableName = testProps.getProperty(TestProps.TERASORT_TABLE);
+      Objects.requireNonNull(tableName);
+
+      AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
+          .defaultTable(tableName).store(job);
+
+      Configuration conf = job.getConfiguration();
+      conf.set("mapreduce.job.classloader", "true");
+      for (Object keyObj : testProps.keySet()) {
+        String key = (String) keyObj;
+        if (key.startsWith(TestProps.TERASORT)) {
+          conf.set(key, testProps.getProperty(key));
+        }
       }
-    }
 
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
+      job.waitForCompletion(true);
+      return job.isSuccessful() ? 0 : 1;
+    }
   }
 }
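
The run() method above forwards every test property whose key starts with the
terasort prefix into the Hadoop job configuration. The same filtering in
isolation, with hypothetical keys (the real prefix is TestProps.TERASORT):

    import java.util.Properties;

    public class PrefixCopyDemo {
        public static void main(String[] args) {
            Properties testProps = new Properties();
            testProps.setProperty("test.ci.terasort.table", "terasort");
            testProps.setProperty("test.ci.ingest.rows", "ignored");
            String prefix = "test.ci.terasort"; // illustrative value
            for (Object keyObj : testProps.keySet()) {
                String key = (String) keyObj;
                if (key.startsWith(prefix)) {
                    System.out.println(key + "=" + testProps.getProperty(key));
                }
            }
        }
    }
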
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/ConditionalMutationsPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/ConditionalMutationsPT.java
index 2d4f8b1..bf86a6a 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/ConditionalMutationsPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/ConditionalMutationsPT.java
@@ -22,10 +22,8 @@ import org.apache.accumulo.core.client.admin.NewTableConfiguration;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.Condition;
 import org.apache.accumulo.core.data.ConditionalMutation;
-import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.testing.performance.Environment;
 import org.apache.accumulo.testing.performance.PerformanceTest;
@@ -35,6 +33,7 @@ import org.apache.hadoop.io.Text;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterables;
 import com.google.common.hash.Hashing;
 
 public class ConditionalMutationsPT implements PerformanceTest {
@@ -85,7 +84,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 1-19",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "ConditionalMutationsTest: average rate (conditions/sec) to run sequence 1-19");
 
     env.getClient().tableOperations().flush(tableName, null, null, true);
@@ -96,7 +95,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 20-39",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "ConditionalMutationsTest: average rate (conditions/sec)  to run sequence 20-39");
   }
 
@@ -157,7 +156,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 1-19",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "RandomizeConditionalMutationsTest: average rate (conditions/sec)  to run sequence 1-19");
 
     env.getClient().tableOperations().flush(tableName, null, null, true);
@@ -168,7 +167,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 20-39",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "RandomizeConditionalMutationsTest: average rate (conditions/sec)  to run sequence 20-39");
   }
 
@@ -241,7 +240,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 1-19",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "RandomizeBatchScanAndWriteTest: average rate (conditions/sec)  to write and scan sequence 1-19");
 
     env.getClient().tableOperations().flush(tableName, null, null, true);
@@ -252,7 +251,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate: 20-39",
-        new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "RandomizeBatchScanAndWriteTest: average rate (conditions/sec)  to write and scan sequence 20-39 post flush");
   }
 
@@ -282,10 +281,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
 
     bs.setRanges(ranges);
 
-    int count = 0;
-    for (Map.Entry<Key,Value> entry : bs) {
-      count++;
-    }
+    int count = Iterables.size(bs);
     if (0 != count) {
       throw new RuntimeException("count = " + count);
     }
@@ -324,7 +320,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate1",
-        new Double(new DecimalFormat("#0.00").format(rateSum / numTest)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / numTest)),
         "SetBlockSizeTest: average rate in conditions/sec");
 
     env.getClient().tableOperations().flush(tableName, null, null, true);
@@ -335,7 +331,7 @@ public class ConditionalMutationsPT implements PerformanceTest {
     }
 
     reportBuilder.result("avgRate2",
-        new Double(new DecimalFormat("#0.00").format(rateSum / numTest)),
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / numTest)),
         "SetBlockSizeTest: average rate in conditions/sec post flush");
 
     env.getClient().tableOperations().compact(tableName, null, null, true, true);
@@ -345,7 +341,8 @@ public class ConditionalMutationsPT implements PerformanceTest {
       rateSum += setBlockSizeTime(cw, numRows, numCols);
     }
 
-    reportBuilder.result("avgRate3", new Double(new DecimalFormat("#0.00").format(rateSum / 20)),
+    reportBuilder.result("avgRate3",
+        Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20)),
         "SetBlockSizeTest: average rate in conditions/sec post compaction");
     reportBuilder.parameter("numRows", numRows, "SetBlockSizeTest: The number of rows");
     reportBuilder.parameter("numCols", numCols, "SetBlockSizeTest: The number of columns");
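
The replacements above drop the deprecated new Double(String) constructor while
keeping the same round-to-two-decimals behavior: format the value, then parse
it back. A standalone sketch (note the pattern assumes a locale whose decimal
separator is '.'):

    import java.text.DecimalFormat;

    public class RoundRateDemo {
        public static void main(String[] args) {
            double rateSum = 24691.3578;
            // Format to two decimal places, then parse back to a double
            double avg = Double.parseDouble(new DecimalFormat("#0.00").format(rateSum / 20));
            System.out.println(avg); // 1234.57
        }
    }
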
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
index 20911cc..ae9b8c0 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
@@ -18,6 +18,7 @@
 package org.apache.accumulo.testing.performance.tests;
 
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LongSummaryStatistics;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -161,7 +162,7 @@ public class ScanExecutorPT implements PerformanceTest {
     while (!stop.get()) {
       try (Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY)) {
         scanner.setExecutionHints(hints);
-        for (Entry<Key,Value> entry : scanner) {
+        for (Iterator<Entry<Key,Value>> iter = scanner.iterator(); iter.hasNext(); iter.next()) {
           count++;
           if (stop.get()) {
             return count;
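
The loop rewrite above advances the scanner's iterator in the for-update clause
instead of binding each entry to a variable, which removes the unused-variable
warning while still counting every result. The same idiom over a plain list:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class CountWithoutBindingDemo {
        public static void main(String[] args) {
            List<String> items = Arrays.asList("a", "b", "c");
            long count = 0;
            // iter.next() runs as the update step; no per-entry variable is declared
            for (Iterator<String> iter = items.iterator(); iter.hasNext(); iter.next()) {
                count++;
            }
            System.out.println(count); // 3
        }
    }
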
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/YieldingScanExecutorPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/YieldingScanExecutorPT.java
index 59571fb..8fe424f 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/YieldingScanExecutorPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/YieldingScanExecutorPT.java
@@ -182,7 +182,7 @@ public class YieldingScanExecutorPT implements PerformanceTest {
         scanner.addScanIterator(is);
 
         // scanner.setExecutionHints(hints);
-        for (Entry<Key,Value> entry : scanner) {
+        for (Iterator<Entry<Key,Value>> iter = scanner.iterator(); iter.hasNext(); iter.next()) {
           count++;
           if (stop.get()) {
             return count;
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
index 60821e2..55d81c1 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
@@ -30,7 +30,6 @@ import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.testing.randomwalk.RandWalkEnv;
 import org.apache.accumulo.testing.randomwalk.State;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -58,9 +57,7 @@ public class BulkPlusOne extends BulkImportTest {
     final FileSystem fs = (FileSystem) state.get("fs");
     final Path dir = new Path(fs.getUri() + "/tmp", "bulk_" + UUID.randomUUID().toString());
     log.debug("Bulk loading from {}", dir);
-    final Path fail = new Path(dir.toString() + "_fail");
     final Random rand = state.getRandom();
-    fs.mkdirs(fail);
     final int parts = rand.nextInt(10) + 1;
 
     TreeSet<Integer> startRows = new TreeSet<>();
@@ -96,15 +93,9 @@ public class BulkPlusOne extends BulkImportTest {
       }
       writer.close();
     }
-    env.getAccumuloClient().tableOperations().importDirectory(Setup.getTableName(), dir.toString(),
-        fail.toString(), true);
+    env.getAccumuloClient().tableOperations().importDirectory(dir.toString())
+        .to(Setup.getTableName()).tableTime(true);
     fs.delete(dir, true);
-    FileStatus[] failures = fs.listStatus(fail);
-    if (failures != null && failures.length > 0) {
-      state.set("bulkImportSuccess", "false");
-      throw new Exception(failures.length + " failure files found importing files from " + dir);
-    }
-    fs.delete(fail, true);
     log.debug("Finished bulk import, start rows " + printRows + " last row "
         + String.format(FMT, LOTS - 1) + " marker " + markerColumnQualifier);
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
index 1d1d0c1..4dd9736 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
@@ -126,8 +126,8 @@ public class BulkImport extends Test {
         bw.close();
       }
 
-      client.tableOperations().importDirectory(tableName, bulkDir, bulkDir + "_f",
-          rand.nextBoolean());
+      client.tableOperations().importDirectory(bulkDir).to(tableName).tableTime(rand.nextBoolean())
+          .load();
 
       log.debug("BulkImported to " + tableName);
     } catch (TableNotFoundException e) {
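
This change and the BulkPlusOne change above both move from the deprecated
importDirectory(table, dir, failDir, setTime) signature to the fluent bulk
import builder in the Accumulo 2.0 public API. A sketch of the new shape,
assuming a connected client and an existing table (names are placeholders):

    import org.apache.accumulo.core.client.AccumuloClient;

    public class BulkImportSketch {
        // Sketch only; the directory and table name are hypothetical.
        static void importDir(AccumuloClient client) throws Exception {
            // The builder performs no work until load() is called.
            client.tableOperations().importDirectory("/tmp/bulk_dir")
                .to("mytable").tableTime(true).load();
        }
    }
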
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
index b209d33..992fad6 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
@@ -52,6 +52,8 @@ public class Config extends Test {
     return new Setting(property, min, max);
   }
 
+  @SuppressWarnings("deprecation")
+  Property TSERV_READ_AHEAD_MAXCONCURRENT_deprecated = Property.TSERV_READ_AHEAD_MAXCONCURRENT;
   // @formatter:off
 	Setting[] settings = {
 			s(Property.TSERV_BLOOM_LOAD_MAXCONCURRENT, 1, 10),
@@ -69,7 +71,7 @@ public class Config extends Test {
 			s(Property.TSERV_DEFAULT_BLOCKSIZE, 100000, 10000000L),
 			s(Property.TSERV_MAX_IDLE, 10000, 500 * 1000),
 			s(Property.TSERV_MAXMEM, 1000000, 3 * 1024 * 1024 * 1024L),
-			s(Property.TSERV_READ_AHEAD_MAXCONCURRENT, 1, 25),
+			s(TSERV_READ_AHEAD_MAXCONCURRENT_deprecated, 1, 25),
 			s(Property.TSERV_MIGRATE_MAXCONCURRENT, 1, 10),
 			s(Property.TSERV_TOTAL_MUTATION_QUEUE_MAX, 10000, 1024 * 1024),
 			s(Property.TSERV_RECOVERY_MAX_CONCURRENT, 1, 100),
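
The alias field introduced above narrows @SuppressWarnings("deprecation") to a
single declaration instead of annotating the whole settings array. The same
idiom in isolation, with a hypothetical deprecated constant:

    class LegacyConfig {
        @Deprecated
        static final int OLD_LIMIT = 25;
    }

    public class SuppressFieldDemo {
        // The suppression is scoped to this one alias; code that uses the
        // alias compiles without a deprecation warning.
        @SuppressWarnings("deprecation")
        static final int OLD_LIMIT_deprecated = LegacyConfig.OLD_LIMIT;

        public static void main(String[] args) {
            System.out.println(OLD_LIMIT_deprecated);
        }
    }
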
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
index b632249..0c8c474 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
@@ -45,7 +45,6 @@ public class SecurityHelper {
 
   private static final String authsMap = "authorizationsCountMap";
   private static final String lastKey = "lastMutationKey";
-  private static final String filesystem = "securityFileSystem";
 
   public static String getTableName(State state) {
     return state.getString(tableName);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
index 7e217c3..bf58fdb 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
@@ -240,7 +240,7 @@ public class TableOp extends Test {
             rFileWriter.append(k, new Value("Value".getBytes(UTF_8)));
           rFileWriter.close();
           try {
-            tableOps.importDirectory(tableName, dir.toString(), fail.toString(), true);
+            tableOps.importDirectory(dir.toString()).to(tableName).tableTime(true).load();
           } catch (TableNotFoundException tnfe) {
             if (tableExists)
               throw new AccumuloException("Table didn't exist when it should have: " + tableName);
@@ -293,6 +293,9 @@ public class TableOp extends Test {
           props.setProperty("source", "table");
           DropTable.dropTable(state, env, props);
           break;
+
+        case GET_SUMMARIES:
+          throw new UnsupportedOperationException("GET_SUMMARIES not implemented");
       }
     }
   }
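
The added case addresses the incomplete-switch warning that IDEs such as
Eclipse raise for enum switches that do not cover every constant. The same
situation in isolation, with a hypothetical enum:

    public class SwitchCaseDemo {
        enum Op { READ, WRITE, GET_SUMMARIES }

        static String describe(Op op) {
            switch (op) {
                case READ:
                    return "read";
                case WRITE:
                    return "write";
                case GET_SUMMARIES:
                    // Without this case, the switch does not cover all constants
                    throw new UnsupportedOperationException("GET_SUMMARIES not implemented");
            }
            return "unreachable";
        }

        public static void main(String[] args) {
            System.out.println(describe(Op.READ));
        }
    }
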
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
index e1c377a..1916e57 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
@@ -142,6 +142,7 @@ public class BulkInsert extends Test {
     fs.delete(new Path(rootDir), true);
   }
 
+  @SuppressWarnings("deprecation")
   private void bulkImport(FileSystem fs, State state, RandWalkEnv env, String tableName,
       String rootDir, String prefix) throws Exception {
     while (true) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/SortTool.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/SortTool.java
index 5206417..91ff5eb 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/SortTool.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/SortTool.java
@@ -18,8 +18,6 @@ package org.apache.accumulo.testing.randomwalk.shard;
 
 import java.util.Collection;
 
-import org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;
-import org.apache.accumulo.core.client.mapreduce.lib.partition.KeyRangePartitioner;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.hadoop.conf.Configured;
@@ -45,6 +43,7 @@ public class SortTool extends Configured implements Tool {
     this.splits = splits;
   }
 
+  @SuppressWarnings("deprecation")
   @Override
   public int run(String[] args) throws Exception {
     Job job = Job.getInstance(getConf(), this.getClass().getSimpleName());
@@ -58,16 +57,20 @@ public class SortTool extends Configured implements Tool {
     job.setInputFormatClass(SequenceFileInputFormat.class);
     SequenceFileInputFormat.setInputPaths(job, seqFile);
 
-    job.setPartitionerClass(KeyRangePartitioner.class);
-    KeyRangePartitioner.setSplitFile(job, splitFile);
+    job.setPartitionerClass(
+        org.apache.accumulo.core.client.mapreduce.lib.partition.KeyRangePartitioner.class);
+    org.apache.accumulo.core.client.mapreduce.lib.partition.KeyRangePartitioner.setSplitFile(job,
+        splitFile);
 
     job.setMapOutputKeyClass(Key.class);
     job.setMapOutputValueClass(Value.class);
 
     job.setNumReduceTasks(splits.size() + 1);
 
-    job.setOutputFormatClass(AccumuloFileOutputFormat.class);
-    AccumuloFileOutputFormat.setOutputPath(job, new Path(outputDir));
+    job.setOutputFormatClass(
+        org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat.class);
+    org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat.setOutputPath(job,
+        new Path(outputDir));
 
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;
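
A note on the fully qualified names above: Eclipse flags an import of a
deprecated type, and @SuppressWarnings cannot be attached to an import
statement, so referencing the deprecated mapreduce classes by their fully
qualified names keeps every deprecated use inside the annotated run() method.
The same idiom with a deprecated JDK class:

    public class FqnDemo {
        @SuppressWarnings("deprecation")
        static int countBytes(String s) {
            // java.io.StringBufferInputStream is deprecated; using its fully
            // qualified name (no import) keeps the deprecated reference inside
            // this annotated method.
            java.io.StringBufferInputStream in = new java.io.StringBufferInputStream(s);
            return in.available();
        }

        public static void main(String[] args) {
            System.out.println(countBytes("abc")); // 3
        }
    }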