You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by kr...@apache.org on 2019/12/13 03:26:07 UTC

[lucene-solr] branch master updated: SOLR-14048: Improve Hadoop test sanity checks

This is an automated email from the ASF dual-hosted git repository.

krisden pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git


The following commit(s) were added to refs/heads/master by this push:
     new 3ba0054  SOLR-14048: Improve Hadoop test sanity checks
3ba0054 is described below

commit 3ba005465a5dff3975b85f9c44d365bd3cd36346
Author: Kevin Risden <kr...@apache.org>
AuthorDate: Wed Dec 11 22:57:50 2019 -0500

    SOLR-14048: Improve Hadoop test sanity checks
    
    Signed-off-by: Kevin Risden <kr...@apache.org>
---
 .../src/test/org/apache/hadoop/fs/FileUtil.java    |  1 +
 .../src/test/org/apache/hadoop/fs/HardLink.java    |  1 +
 .../org/apache/hadoop/fs/RawLocalFileSystem.java   |  2 ++
 .../datanode/fsdataset/impl/BlockPoolSlice.java    |  4 ++-
 .../server/namenode/NameNodeResourceChecker.java   |  2 ++
 .../test/org/apache/hadoop/http/HttpServer2.java   |  2 ++
 .../src/test/org/apache/hadoop/package-info.java   | 39 ++++++++++++++++++++++
 .../test/org/apache/hadoop/util/DiskChecker.java   |  2 ++
 .../org/apache/solr/cloud/hdfs/HdfsTestUtil.java   | 29 ++++++++++++++++
 9 files changed, 81 insertions(+), 1 deletion(-)

diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
index 98e281dc..f49604f 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
@@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FileUtil {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
   private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
 
diff --git a/solr/core/src/test/org/apache/hadoop/fs/HardLink.java b/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
index f3a173e..3b546ce 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
@@ -38,6 +38,7 @@ import static java.nio.file.Files.createLink;
  * efficient - and minimizes the impact of the extra buffer creations.
  */
 public class HardLink {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
   public final LinkStats linkStats; //not static
 
diff --git a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
index 4ee69b9..5373c03 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -62,6 +62,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class RawLocalFileSystem extends FileSystem {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   static final URI NAME = URI.create("file:///");
   private Path workingDir;
   // Temporary workaround for HADOOP-9652.
diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index ff19c72..bfc18a3 100644
--- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -78,7 +78,9 @@ import com.google.common.annotations.VisibleForTesting;
  *
  * This class is synchronized by {@link FsVolumeImpl}.
  */
-class BlockPoolSlice {
+public class BlockPoolSlice {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
 
   private final String bpid;
diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java
index bf18fac..0f500e1 100644
--- a/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java
+++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java
@@ -32,6 +32,8 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Private
 public class NameNodeResourceChecker {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   /**
    * Create a NameNodeResourceChecker, which will check the edits dirs and any
    * additional dirs to check set in <code>conf</code>.
diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
index 757b211..e7ae95c 100644
--- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
+++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
@@ -116,6 +116,8 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public final class HttpServer2 implements FilterContainer {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
 
   public static final String HTTP_SCHEME = "http";
diff --git a/solr/core/src/test/org/apache/hadoop/package-info.java b/solr/core/src/test/org/apache/hadoop/package-info.java
new file mode 100644
index 0000000..f10ed77
--- /dev/null
+++ b/solr/core/src/test/org/apache/hadoop/package-info.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The classes under this package were copied from Apache Hadoop and modified
+ * to avoid certain issues during tests. The copied classes override the
+ * Apache Hadoop dependency versions during tests.
+ *
+ * HttpServer2 class was copied to avoid Jetty 9.4 dependency issues. Since
+ * Solr uses Jetty 9.4, Hadoop integration tests need to use Jetty 9.4 as
+ * well. The HttpServer2 class should be removed when Hadoop is upgraded to
+ * 3.3.0 due to HADOOP-16152 upgrading Hadoop to Jetty 9.4.
+ *
+ * The classes BlockPoolSlice (HDFS-14251), DiskChecker, FileUtil, HardLink,
+ * NameNodeResourceChecker, and RawLocalFileSystem were copied to avoid
+ * issues with running Hadoop integration tests under the Java security
+ * manager. Many of these classes use org.apache.hadoop.util.Shell
+ * which shells out to try to do common filesystem checks.
+ *
+ * Over time these classes should be removed as upstream fixes to Apache
+ * Hadoop are made. When the Apache Hadoop dependency is upgraded in
+ * Solr, the classes should be compared against that version.
+ */
+package org.apache.hadoop;
+
diff --git a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
index 3043da9..54ba286 100644
--- a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
+++ b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
@@ -43,6 +43,8 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskChecker {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   public static final Logger LOG = LoggerFactory.getLogger(DiskChecker.class);
 
   public static class DiskErrorException extends IOException {
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
index 2afee35..98f8274 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
@@ -19,8 +19,10 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.net.URI;
+import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Timer;
@@ -33,12 +35,19 @@ import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
 import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.util.DiskChecker;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -54,6 +63,8 @@ import static org.apache.lucene.util.LuceneTestCase.random;
 public class HdfsTestUtil {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  private static final String SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD = "SOLR_HACK_FOR_CLASS_VERIFICATION";
+
   private static final String LOGICAL_HOSTNAME = "ha-nn-uri-%d";
 
   private static final boolean HA_TESTING_ENABLED = false; // SOLR-XXX
@@ -76,6 +87,7 @@ public class HdfsTestUtil {
   public static void checkAssumptions() {
     ensureHadoopHomeNotSet();
     checkHadoopWindows();
+    checkOverriddenHadoopClasses();
     checkFastDateFormat();
     checkGeneratedIdMatches();
   }
@@ -104,6 +116,23 @@ public class HdfsTestUtil {
   }
 
   /**
+   * Ensure that the tests are picking up the modified Hadoop classes
+   */
+  private static void checkOverriddenHadoopClasses() {
+    List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class,
+        FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class);
+    for (Class<?> clazz : modifiedHadoopClasses) {
+      try {
+        LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",
+            clazz.getField(SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD));
+      } catch (NoSuchFieldException e) {
+        LuceneTestCase.fail("Expected to load Solr modified Hadoop class " + clazz.getCanonicalName() +
+            " , but it was not found.");
+      }
+    }
+  }
+
+  /**
    * Checks that commons-lang3 FastDateFormat works with configured locale
    */
   @SuppressForbidden(reason="Call FastDateFormat.format same way Hadoop calls it")