You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ta...@apache.org on 2018/09/13 11:43:52 UTC

hadoop git commit: HADOOP-12760. sun.misc.Cleaner has moved to a new location in OpenJDK 9. Contributed by Akira Ajisaka.

Repository: hadoop
Updated Branches:
  refs/heads/trunk c6e19db19 -> 5d084d7ec


HADOOP-12760. sun.misc.Cleaner has moved to a new location in OpenJDK 9. Contributed by Akira Ajisaka.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5d084d7e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5d084d7e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5d084d7e

Branch: refs/heads/trunk
Commit: 5d084d7eca32cfa647a78ff6ed3c378659f5b186
Parents: c6e19db
Author: Takanobu Asanuma <ta...@apache.org>
Authored: Thu Sep 13 20:42:09 2018 +0900
Committer: Takanobu Asanuma <ta...@apache.org>
Committed: Thu Sep 13 20:42:09 2018 +0900

----------------------------------------------------------------------
 .../apache/hadoop/crypto/CryptoStreamUtils.java |  21 +-
 .../org/apache/hadoop/io/nativeio/NativeIO.java |  15 +-
 .../org/apache/hadoop/util/CleanerUtil.java     | 199 +++++++++++++++++++
 3 files changed, 224 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d084d7e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index be85497..b55f842 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -27,22 +27,31 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.util.CleanerUtil;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 public class CryptoStreamUtils {
   private static final int MIN_BUFFER_SIZE = 512;
-  
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CryptoStreamUtils.class);
+
   /** Forcibly free the direct buffer. */
   public static void freeDB(ByteBuffer buffer) {
-    if (buffer instanceof sun.nio.ch.DirectBuffer) {
-      final sun.misc.Cleaner bufferCleaner =
-          ((sun.nio.ch.DirectBuffer) buffer).cleaner();
-      bufferCleaner.clean();
+    if (CleanerUtil.UNMAP_SUPPORTED) {
+      try {
+        CleanerUtil.getCleaner().freeBuffer(buffer);
+      } catch (IOException e) {
+        LOG.info("Failed to free the buffer", e);
+      }
+    } else {
+      LOG.trace(CleanerUtil.UNMAP_NOT_SUPPORTED_REASON);
     }
   }
-  
+
   /** Read crypto buffer size */
   public static int getBufferSize(Configuration conf) {
     return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY, 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d084d7e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index adc74bd..4e0cd8f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
+import org.apache.hadoop.util.CleanerUtil;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.PerformanceAdvisory;
@@ -315,7 +316,7 @@ public class NativeIO {
       }
       mlock_native(buffer, len);
     }
-    
+
     /**
      * Unmaps the block from memory. See munmap(2).
      *
@@ -329,10 +330,14 @@ public class NativeIO {
      * @param buffer    The buffer to unmap.
      */
     public static void munmap(MappedByteBuffer buffer) {
-      if (buffer instanceof sun.nio.ch.DirectBuffer) {
-        sun.misc.Cleaner cleaner =
-            ((sun.nio.ch.DirectBuffer)buffer).cleaner();
-        cleaner.clean();
+      if (CleanerUtil.UNMAP_SUPPORTED) {
+        try {
+          CleanerUtil.getCleaner().freeBuffer(buffer);
+        } catch (IOException e) {
+          LOG.info("Failed to unmap the buffer", e);
+        }
+      } else {
+        LOG.trace(CleanerUtil.UNMAP_NOT_SUPPORTED_REASON);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d084d7e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java
new file mode 100644
index 0000000..a56602e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Objects;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import static java.lang.invoke.MethodHandles.constant;
+import static java.lang.invoke.MethodHandles.dropArguments;
+import static java.lang.invoke.MethodHandles.filterReturnValue;
+import static java.lang.invoke.MethodHandles.guardWithTest;
+import static java.lang.invoke.MethodType.methodType;
+
+/**
+ * sun.misc.Cleaner has moved in OpenJDK 9 and
+ * sun.misc.Unsafe#invokeCleaner(ByteBuffer) is the replacement.
+ * This class is a hack to use sun.misc.Cleaner in Java 8 and
+ * use the replacement in Java 9+.
+ * This implementation is inspired by LUCENE-6989.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public final class CleanerUtil {
+
+  // Prevent instantiation
+  private CleanerUtil(){}
+
+  /**
+   * <code>true</code> if this platform supports unmapping memory-mapped files.
+   */
+  public static final boolean UNMAP_SUPPORTED;
+
+  /**
+   * If {@link #UNMAP_SUPPORTED} is {@code false}, this contains the reason
+   * why unmapping is not supported.
+   */
+  public static final String UNMAP_NOT_SUPPORTED_REASON;
+
+
+  private static final BufferCleaner CLEANER;
+
+  /**
+   * Returns the {@link BufferCleaner} that performs the unmapping.
+   * @return the cleaner, or {@code null} if unmapping is not supported.
+   */
+  public static BufferCleaner getCleaner() {
+    return CLEANER;
+  }
+
+  static {
+    final Object hack = AccessController.doPrivileged(
+        (PrivilegedAction<Object>) CleanerUtil::unmapHackImpl);
+    if (hack instanceof BufferCleaner) {
+      CLEANER = (BufferCleaner) hack;
+      UNMAP_SUPPORTED = true;
+      UNMAP_NOT_SUPPORTED_REASON = null;
+    } else {
+      CLEANER = null;
+      UNMAP_SUPPORTED = false;
+      UNMAP_NOT_SUPPORTED_REASON = hack.toString();
+    }
+  }
+
+  private static Object unmapHackImpl() {
+    final MethodHandles.Lookup lookup = MethodHandles.lookup();
+    try {
+      try {
+        // *** sun.misc.Unsafe unmapping (Java 9+) ***
+        final Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
+        // first check if Unsafe has the right method, otherwise we can
+        // give up without doing any security critical stuff:
+        final MethodHandle unmapper = lookup.findVirtual(unsafeClass,
+            "invokeCleaner", methodType(void.class, ByteBuffer.class));
+        // fetch the unsafe instance and bind it to the virtual MH:
+        final Field f = unsafeClass.getDeclaredField("theUnsafe");
+        f.setAccessible(true);
+        final Object theUnsafe = f.get(null);
+        return newBufferCleaner(ByteBuffer.class, unmapper.bindTo(theUnsafe));
+      } catch (SecurityException se) {
+        // rethrow to report errors correctly (we need to catch it here,
+        // as we also catch RuntimeException below!):
+        throw se;
+      } catch (ReflectiveOperationException | RuntimeException e) {
+        // *** sun.misc.Cleaner unmapping (Java 8) ***
+        final Class<?> directBufferClass =
+            Class.forName("java.nio.DirectByteBuffer");
+
+        final Method m = directBufferClass.getMethod("cleaner");
+        m.setAccessible(true);
+        final MethodHandle directBufferCleanerMethod = lookup.unreflect(m);
+        final Class<?> cleanerClass =
+            directBufferCleanerMethod.type().returnType();
+
+        /*
+         * "Compile" a MethodHandle that basically is equivalent
+         * to the following code:
+         *
+         * void unmapper(ByteBuffer byteBuffer) {
+         *   sun.misc.Cleaner cleaner =
+         *       ((java.nio.DirectByteBuffer) byteBuffer).cleaner();
+         *   if (Objects.nonNull(cleaner)) {
+         *     cleaner.clean();
+         *   } else {
+         *     // the noop is needed because MethodHandles#guardWithTest
+         *     // always needs ELSE
+         *     noop(cleaner);
+         *   }
+         * }
+         */
+        final MethodHandle cleanMethod = lookup.findVirtual(
+            cleanerClass, "clean", methodType(void.class));
+        final MethodHandle nonNullTest = lookup.findStatic(Objects.class,
+            "nonNull", methodType(boolean.class, Object.class))
+            .asType(methodType(boolean.class, cleanerClass));
+        final MethodHandle noop = dropArguments(
+            constant(Void.class, null).asType(methodType(void.class)),
+            0, cleanerClass);
+        final MethodHandle unmapper = filterReturnValue(
+            directBufferCleanerMethod,
+            guardWithTest(nonNullTest, cleanMethod, noop))
+            .asType(methodType(void.class, ByteBuffer.class));
+        return newBufferCleaner(directBufferClass, unmapper);
+      }
+    } catch (SecurityException se) {
+      return "Unmapping is not supported, because not all required " +
+          "permissions are given to the Hadoop JAR file: " + se +
+          " [Please grant at least the following permissions: " +
+          "RuntimePermission(\"accessClassInPackage.sun.misc\") " +
+          " and ReflectPermission(\"suppressAccessChecks\")]";
+    } catch (ReflectiveOperationException | RuntimeException e) {
+      return "Unmapping is not supported on this platform, " +
+          "because internal Java APIs are not compatible with " +
+          "this Hadoop version: " + e;
+    }
+  }
+
+  private static BufferCleaner newBufferCleaner(
+      final Class<?> unmappableBufferClass, final MethodHandle unmapper) {
+    assert Objects.equals(
+        methodType(void.class, ByteBuffer.class), unmapper.type());
+    return buffer -> {
+      if (!buffer.isDirect()) {
+        throw new IllegalArgumentException(
+            "unmapping only works with direct buffers");
+      }
+      if (!unmappableBufferClass.isInstance(buffer)) {
+        throw new IllegalArgumentException("buffer is not an instance of " +
+            unmappableBufferClass.getName());
+      }
+      final Throwable error = AccessController.doPrivileged(
+          (PrivilegedAction<Throwable>) () -> {
+            try {
+              unmapper.invokeExact(buffer);
+              return null;
+            } catch (Throwable t) {
+              return t;
+            }
+          });
+      if (error != null) {
+        throw new IOException("Unable to unmap the mapped buffer", error);
+      }
+    };
+  }
+
+  /**
+   * Pass in an implementation of this interface to clean up ByteBuffers.
+   * CleanerUtil implements this to allow unmapping of bytebuffers
+   * with private Java APIs.
+   */
+  @FunctionalInterface
+  public interface BufferCleaner {
+    void freeBuffer(ByteBuffer b) throws IOException;
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org