You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by al...@apache.org on 2017/11/15 09:46:11 UTC
flink git commit: [FLINK-7973] disable JNI bridge for relocated
hadoop classes in s3-fs-*
Repository: flink
Updated Branches:
refs/heads/release-1.4 5c6eaabfc -> d6d35fa14
[FLINK-7973] disable JNI bridge for relocated hadoop classes in s3-fs-*
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/d6d35fa1
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/d6d35fa1
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/d6d35fa1
Branch: refs/heads/release-1.4
Commit: d6d35fa145c122f469d72bf73433afde5d778aac
Parents: 5c6eaab
Author: Nico Kruber <ni...@data-artisans.com>
Authored: Tue Nov 14 14:36:22 2017 +0100
Committer: Aljoscha Krettek <al...@gmail.com>
Committed: Wed Nov 15 10:45:31 2017 +0100
----------------------------------------------------------------------
flink-filesystems/flink-s3-fs-hadoop/README.md | 21 +++--
.../apache/hadoop/util/NativeCodeLoader.java | 94 ++++++++++++++++++++
flink-filesystems/flink-s3-fs-presto/README.md | 21 +++--
.../apache/hadoop/util/NativeCodeLoader.java | 94 ++++++++++++++++++++
tools/maven/suppressions.xml | 5 +-
5 files changed, 222 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/d6d35fa1/flink-filesystems/flink-s3-fs-hadoop/README.md
----------------------------------------------------------------------
diff --git a/flink-filesystems/flink-s3-fs-hadoop/README.md b/flink-filesystems/flink-s3-fs-hadoop/README.md
index 3ad90e3..ca8fdab 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/README.md
+++ b/flink-filesystems/flink-s3-fs-hadoop/README.md
@@ -13,12 +13,21 @@ relocated class names of classes loaded via reflection
If you want to change the Hadoop version this project depends on, the following
steps are required to keep the shading correct:
-1. copy `org/apache/hadoop/conf/Configuration.java` from the respective Hadoop jar file to this project
- - adapt the `Configuration` class by replacing `core-default.xml` with `core-default-shaded.xml`.
-2. copy `core-default.xml` from the respective Hadoop jar file to this project as
- - `src/main/resources/core-default-shaded.xml` (replacing every occurence of `org.apache.hadoop` with `org.apache.flink.fs.s3hadoop.shaded.org.apache.hadoop`)
- - `src/test/resources/core-site.xml` (as is)
-3. verify the shaded jar:
+1. from the respective Hadoop jar (currently 2.8.1 as of the `s3hadoop.hadoop.version` property in our `pom.xml`),
+ - copy `org/apache/hadoop/conf/Configuration.java` to `src/main/java/org/apache/hadoop/conf/` and
+ - replace `core-default.xml` with `core-default-shaded.xml`.
+ - copy `org/apache/hadoop/util/NativeCodeLoader.java` to `src/main/java/org/apache/hadoop/util/` and
+ - replace the static initializer with
+ ```
+ static {
+ LOG.info("Skipping native-hadoop library for flink-s3-fs-hadoop's relocated Hadoop... " +
+ "using builtin-java classes where applicable");
+ }
+```
+ - copy `core-default.xml` to `src/main/resources/core-default-shaded.xml` and
+ - change every occurrence of `org.apache.hadoop` into `org.apache.flink.fs.s3hadoop.shaded.org.apache.hadoop`
+ - copy `core-site.xml` to `src/test/resources/core-site.xml` (as is)
+2. verify the shaded jar:
- does not contain any unshaded classes except for `org.apache.flink.fs.s3hadoop.S3FileSystemFactory`
- all other classes should be under `org.apache.flink.fs.s3hadoop.shaded`
- there should be a `META-INF/services/org.apache.flink.fs.s3hadoop.S3FileSystemFactory` file pointing to the `org.apache.flink.fs.s3hadoop.S3FileSystemFactory` class
http://git-wip-us.apache.org/repos/asf/flink/blob/d6d35fa1/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
----------------------------------------------------------------------
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
new file mode 100644
index 0000000..822da5b
--- /dev/null
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+
+/**
+ * A helper to load the native hadoop code i.e. libhadoop.so.
+ * This handles the fallback to either the bundled libhadoop-Linux-i386-32.so
+ * or the default java implementations where appropriate.
+ *
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class NativeCodeLoader {
+
+ private static final Log LOG =
+ LogFactory.getLog(NativeCodeLoader.class);
+
+ private static boolean nativeCodeLoaded = false;
+
+ static {
+ LOG.info("Skipping native-hadoop library for flink-s3-fs-hadoop's relocated Hadoop... " +
+ "using builtin-java classes where applicable");
+ }
+
+ /**
+ * Check if native-hadoop code is loaded for this platform.
+ *
+ * @return <code>true</code> if native-hadoop is loaded,
+ * else <code>false</code>
+ */
+ public static boolean isNativeCodeLoaded() {
+ return nativeCodeLoaded;
+ }
+
+ /**
+ * Returns true only if this build was compiled with support for snappy.
+ */
+ public static native boolean buildSupportsSnappy();
+
+ /**
+ * Returns true only if this build was compiled with support for openssl.
+ */
+ public static native boolean buildSupportsOpenssl();
+
+ public static native String getLibraryName();
+
+ /**
+ * Return if native hadoop libraries, if present, can be used for this job.
+ * @param conf configuration
+ *
+ * @return <code>true</code> if native hadoop libraries, if present, can be
+ * used for this job; <code>false</code> otherwise.
+ */
+ public boolean getLoadNativeLibraries(Configuration conf) {
+ return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+ CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
+ }
+
+ /**
+ * Set if native hadoop libraries, if present, can be used for this job.
+ *
+ * @param conf configuration
+ * @param loadNativeLibraries can native hadoop libraries be loaded
+ */
+ public void setLoadNativeLibraries(Configuration conf,
+ boolean loadNativeLibraries) {
+ conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+ loadNativeLibraries);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/flink/blob/d6d35fa1/flink-filesystems/flink-s3-fs-presto/README.md
----------------------------------------------------------------------
diff --git a/flink-filesystems/flink-s3-fs-presto/README.md b/flink-filesystems/flink-s3-fs-presto/README.md
index bf3c1fe..cae39c9 100644
--- a/flink-filesystems/flink-s3-fs-presto/README.md
+++ b/flink-filesystems/flink-s3-fs-presto/README.md
@@ -14,12 +14,21 @@ relocated class names of classes loaded via reflection
If you want to change the Hadoop version this project depends on, the following
steps are required to keep the shading correct:
-1. copy `org/apache/hadoop/conf/Configuration.java` from the respective Hadoop jar file (from `com.facebook.presto.hadoop/hadoop-apache2`) to this project
- - adapt the `Configuration` class by replacing `core-default.xml` with `core-default-shaded.xml`.
-2. copy `core-default.xml` from the respective Hadoop jar (from `com.facebook.presto.hadoop/hadoop-apache2`) file to this project as
- - `src/main/resources/core-default-shaded.xml` (replacing every occurence of `org.apache.hadoop` with `org.apache.flink.fs.s3presto.shaded.org.apache.hadoop`)
- - `src/test/resources/core-site.xml` (as is)
-3. verify the shaded jar:
+1. from the respective Hadoop jar (from the `com.facebook.presto.hadoop/hadoop-apache2` artifact, currently version 2.7.3-1 as of our `pom.xml`),
+ - copy `org/apache/hadoop/conf/Configuration.java` to `src/main/java/org/apache/hadoop/conf/` and
+ - replace `core-default.xml` with `core-default-shaded.xml`.
+ - copy `org/apache/hadoop/util/NativeCodeLoader.java` to `src/main/java/org/apache/hadoop/util/` and
+ - replace the static initializer with
+ ```
+ static {
+ LOG.info("Skipping native-hadoop library for flink-s3-fs-presto's relocated Hadoop... " +
+ "using builtin-java classes where applicable");
+ }
+```
+ - copy `core-default.xml` to `src/main/resources/core-default-shaded.xml` and
+ - change every occurrence of `org.apache.hadoop` into `org.apache.flink.fs.s3presto.shaded.org.apache.hadoop`
+ - copy `core-site.xml` to `src/test/resources/core-site.xml` (as is)
+2. verify the shaded jar:
- does not contain any unshaded classes except for `org.apache.flink.fs.s3presto.S3FileSystemFactory`
- all other classes should be under `org.apache.flink.fs.s3presto.shaded`
- there should be a `META-INF/services/org.apache.flink.fs.s3presto.S3FileSystemFactory` file pointing to the `org.apache.flink.fs.s3presto.S3FileSystemFactory` class
http://git-wip-us.apache.org/repos/asf/flink/blob/d6d35fa1/flink-filesystems/flink-s3-fs-presto/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
----------------------------------------------------------------------
diff --git a/flink-filesystems/flink-s3-fs-presto/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/flink-filesystems/flink-s3-fs-presto/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
new file mode 100644
index 0000000..cbcb756
--- /dev/null
+++ b/flink-filesystems/flink-s3-fs-presto/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import com.facebook.presto.hadoop.$internal.org.apache.commons.logging.Log;
+import com.facebook.presto.hadoop.$internal.org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+
+/**
+ * A helper to load the native hadoop code i.e. libhadoop.so.
+ * This handles the fallback to either the bundled libhadoop-Linux-i386-32.so
+ * or the default java implementations where appropriate.
+ *
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class NativeCodeLoader {
+
+ private static final Log LOG =
+ LogFactory.getLog(NativeCodeLoader.class);
+
+ private static boolean nativeCodeLoaded = false;
+
+ static {
+ LOG.info("Skipping native-hadoop library for flink-s3-fs-presto's relocated Hadoop... " +
+ "using builtin-java classes where applicable");
+ }
+
+ /**
+ * Check if native-hadoop code is loaded for this platform.
+ *
+ * @return <code>true</code> if native-hadoop is loaded,
+ * else <code>false</code>
+ */
+ public static boolean isNativeCodeLoaded() {
+ return nativeCodeLoaded;
+ }
+
+ /**
+ * Returns true only if this build was compiled with support for snappy.
+ */
+ public static native boolean buildSupportsSnappy();
+
+ /**
+ * Returns true only if this build was compiled with support for openssl.
+ */
+ public static native boolean buildSupportsOpenssl();
+
+ public static native String getLibraryName();
+
+ /**
+ * Return if native hadoop libraries, if present, can be used for this job.
+ * @param conf configuration
+ *
+ * @return <code>true</code> if native hadoop libraries, if present, can be
+ * used for this job; <code>false</code> otherwise.
+ */
+ public boolean getLoadNativeLibraries(Configuration conf) {
+ return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+ CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
+ }
+
+ /**
+ * Set if native hadoop libraries, if present, can be used for this job.
+ *
+ * @param conf configuration
+ * @param loadNativeLibraries can native hadoop libraries be loaded
+ */
+ public void setLoadNativeLibraries(Configuration conf,
+ boolean loadNativeLibraries) {
+ conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
+ loadNativeLibraries);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/flink/blob/d6d35fa1/tools/maven/suppressions.xml
----------------------------------------------------------------------
diff --git a/tools/maven/suppressions.xml b/tools/maven/suppressions.xml
index 90a1eea..a2de9e8 100644
--- a/tools/maven/suppressions.xml
+++ b/tools/maven/suppressions.xml
@@ -35,8 +35,11 @@ under the License.
<suppress
files="FlinkKinesisProducer.java|FlinkKinesisProducerTest.java"
checks="IllegalImport"/>
- <!-- Configuration class copied from Hadoop -->
+ <!-- Classes copied from Hadoop -->
<suppress
files="org[\\/]apache[\\/]hadoop[\\/]conf[\\/]Configuration.java"
checks=".*"/>
+ <suppress
+ files="org[\\/]apache[\\/]hadoop[\\/]util[\\/]NativeCodeLoader.java"
+ checks=".*"/>
</suppressions>