You are viewing a plain-text version of this content; the canonical (HTML) version is available in the mailing-list archive.
Posted to commits@accumulo.apache.org by ct...@apache.org on 2020/04/09 17:45:15 UTC

[accumulo] branch master updated: Resolves ImportExportIT Failures (#1585)

This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/master by this push:
     new 1919076  Resolves ImportExportIT Failures (#1585)
1919076 is described below

commit 1919076f7741f921da625f530e6eb66c68a6bb67
Author: Drew Farris <dr...@apache.org>
AuthorDate: Thu Apr 9 13:45:03 2020 -0400

    Resolves ImportExportIT Failures (#1585)
---
 .../org/apache/accumulo/core/clientImpl/TableOperationsImpl.java | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/core/src/main/java/org/apache/accumulo/core/clientImpl/TableOperationsImpl.java b/core/src/main/java/org/apache/accumulo/core/clientImpl/TableOperationsImpl.java
index ca2f57a..3f5783b 100644
--- a/core/src/main/java/org/apache/accumulo/core/clientImpl/TableOperationsImpl.java
+++ b/core/src/main/java/org/apache/accumulo/core/clientImpl/TableOperationsImpl.java
@@ -61,6 +61,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Predicate;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
@@ -139,7 +140,6 @@ import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.accumulo.core.volume.VolumeConfiguration;
 import org.apache.accumulo.fate.util.Retry;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -1499,7 +1499,9 @@ public class TableOperationsImpl extends TableOperationsHelper {
       try {
         FileSystem fs = new Path(importDir).getFileSystem(context.getHadoopConf());
         exportFilePath = new Path(importDir, Constants.EXPORT_FILE);
+        log.debug("Looking for export metadata in {}", exportFilePath);
         if (fs.exists(exportFilePath)) {
+          log.debug("Found export metadata in {}", exportFilePath);
           exportFiles.add(exportFilePath);
         }
       } catch (IOException ioe) {
@@ -1577,8 +1579,9 @@ public class TableOperationsImpl extends TableOperationsHelper {
           ioe.getMessage());
     }
 
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(UTF_8)),
-        ByteBuffer.wrap(StringUtils.join(checkedImportDirs, ",").getBytes(UTF_8)));
+    Stream<String> argStream = Stream.concat(Stream.of(tableName), checkedImportDirs.stream());
+    List<ByteBuffer> args =
+        argStream.map(String::getBytes).map(ByteBuffer::wrap).collect(Collectors.toList());
 
     Map<String,String> opts = Collections.emptyMap();