You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2014/08/21 23:56:02 UTC
svn commit: r1619608 - in
/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common: ./
dev-support/ src/ src/main/bin/ src/main/conf/ src/main/docs/
src/main/java/ src/main/java/org/apache/hadoop/crypto/
src/main/java/org/apache/hadoop/cry...
Author: curino
Date: Thu Aug 21 21:55:57 2014
New Revision: 1619608
URL: http://svn.apache.org/r1619608
Log:
Merge with trunk to pick up YARN-2436
Added:
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/AesCtrCryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/AesCtrCryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/Decryptor.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/Decryptor.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/Encryptor.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/Encryptor.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/
- copied from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/
- copied from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/CryptoFSDataInputStream.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/CryptoFSDataInputStream.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/CryptoFSDataOutputStream.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/crypto/CryptoFSDataOutputStream.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/
- copied from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/org_apache_hadoop_crypto.h
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/org_apache_hadoop_crypto.h
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/
- copied from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreams.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreams.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsForLocalFS.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsForLocalFS.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsNormal.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsNormal.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithOpensslAesCtrCryptoCodec.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithOpensslAesCtrCryptoCodec.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestOpensslCipher.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestOpensslCipher.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/
- copied from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOpensslSecureRandom.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOpensslSecureRandom.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheck.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheck.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckMain.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckMain.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckSecond.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckSecond.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckThird.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/ClassLoaderCheckThird.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
- copied unchanged from r1619607, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
Modified:
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/CMakeLists.txt
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/config.h.cmake
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt Thu Aug 21 21:55:57 2014
@@ -13,8 +13,6 @@ Trunk (Unreleased)
NEW FEATURES
- HADOOP-10433. Key Management Server based on KeyProvider API. (tucu)
-
HADOOP-9629. Support Windows Azure Storage - Blob as a file system in Hadoop.
(Dexter Bradshaw, Mostafa Elhemali, Xi Fang, Johannes Klein, David Lao,
Mike Liddell, Chuan Liu, Lengning Liu, Ivan Mitic, Michael Rys,
@@ -25,9 +23,6 @@ Trunk (Unreleased)
Mike Liddell, Chuan Liu, Lengning Liu, Ivan Mitic, Michael Rys,
Alexander Stojanovich, Brian Swan, and Min Wei via cnauroth)
- HADOOP-10719. Add generateEncryptedKey and decryptEncryptedKey
- methods to KeyProvider. (asuresh via tucu)
-
IMPROVEMENTS
HADOOP-8017. Configure hadoop-main pom to get rid of M2E plugin execution
@@ -121,93 +116,15 @@ Trunk (Unreleased)
HADOOP-9833 move slf4j to version 1.7.5 (Kousuke Saruta via stevel)
- HADOOP-10141. Create KeyProvider API to separate encryption key storage
- from the applications. (omalley)
-
- HADOOP-10201. Add listing to KeyProvider API. (Larry McCay via omalley)
-
- HADOOP-10177. Create CLI tools for managing keys. (Larry McCay via omalley)
-
- HADOOP-10244. TestKeyShell improperly tests the results of delete (Larry
- McCay via omalley)
-
HADOOP-10325. Improve jenkins javadoc warnings from test-patch.sh (cmccabe)
HADOOP-10342. Add a new method to UGI to use a Kerberos login subject to
build a new UGI. (Larry McCay via omalley)
- HADOOP-10237. JavaKeyStoreProvider needs to set keystore permissions
- correctly. (Larry McCay via omalley)
-
- HADOOP-10432. Refactor SSLFactory to expose static method to determine
- HostnameVerifier. (tucu)
-
- HADOOP-10427. KeyProvider implementations should be thread safe. (tucu)
-
- HADOOP-10429. KeyStores should have methods to generate the materials
- themselves, KeyShell should use them. (tucu)
-
- HADOOP-10428. JavaKeyStoreProvider should accept keystore password via
- configuration falling back to ENV VAR. (tucu)
-
- HADOOP-10430. KeyProvider Metadata should have an optional description,
- there should be a method to retrieve the metadata from all keys. (tucu)
-
- HADOOP-10534. KeyProvider getKeysMetadata should take a list of names
- rather than returning all keys. (omalley)
-
HADOOP-10563. Remove the dependency of jsp in trunk. (wheat9)
HADOOP-10485. Remove dead classes in hadoop-streaming. (wheat9)
- HADOOP-10696. Add optional attributes to KeyProvider Options and Metadata.
- (tucu)
-
- HADOOP-10695. KMSClientProvider should respect a configurable timeout.
- (yoderme via tucu)
-
- HADOOP-10757. KeyProvider KeyVersion should provide the key name.
- (asuresh via tucu)
-
- HADOOP-10769. Create KeyProvider extension to handle delegation tokens.
- (Arun Suresh via atm)
-
- HADOOP-10812. Delegate KeyProviderExtension#toString to underlying
- KeyProvider. (wang)
-
- HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
-
- HADOOP-10824. Refactor KMSACLs to avoid locking. (Benoy Antony via umamahesh)
-
- HADOOP-10841. EncryptedKeyVersion should have a key name property.
- (asuresh via tucu)
-
- HADOOP-10842. CryptoExtension generateEncryptedKey method should
- receive the key name. (asuresh via tucu)
-
- HADOOP-10750. KMSKeyProviderCache should be in hadoop-common.
- (asuresh via tucu)
-
- HADOOP-10720. KMS: Implement generateEncryptedKey and decryptEncryptedKey
- in the REST API. (asuresh via tucu)
-
- HADOOP-10891. Add EncryptedKeyVersion factory method to
- KeyProviderCryptoExtension. (wang)
-
- HADOOP-10756. KMS audit log should consolidate successful similar requests.
- (asuresh via tucu)
-
- HADOOP-10793. KeyShell args should use single-dash style. (wang)
-
- HADOOP-10936. Change default KeyProvider bitlength to 128. (wang)
-
- HADOOP-10224. JavaKeyStoreProvider has to protect against corrupting
- underlying store. (asuresh via tucu)
-
- HADOOP-10770. KMS add delegation token support. (tucu)
-
- HADOOP-10698. KMS, add proxyuser support. (tucu)
-
BUG FIXES
HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -379,22 +296,9 @@ Trunk (Unreleased)
HADOOP-10044 Improve the javadoc of rpc code (sanjay Radia)
- HADOOP-10488. TestKeyProviderFactory fails randomly. (tucu)
-
- HADOOP-10431. Change visibility of KeyStore.Options getter methods to public. (tucu)
-
- HADOOP-10583. bin/hadoop key throws NPE with no args and assorted other fixups. (clamb via tucu)
-
- HADOOP-10586. KeyShell doesn't allow setting Options via CLI. (clamb via tucu)
-
HADOOP-10625. Trim configuration names when putting/getting them
to properties. (Wangda Tan via xgong)
- HADOOP-10645. TestKMS fails because race condition writing acl files. (tucu)
-
- HADOOP-10611. KMS, keyVersion name should not be assumed to be
- keyName@versionNumber. (tucu)
-
HADOOP-10717. HttpServer2 should load jsp DTD from local jars instead of
going remote. (Dapeng Sun via wheat9)
@@ -409,38 +313,67 @@ Trunk (Unreleased)
HADOOP-10834. Typo in CredentialShell usage. (Benoy Antony via umamahesh)
- HADOOP-10816. KeyShell returns -1 on error to the shell, should be 1.
- (Mike Yoder via wang)
-
HADOOP-10840. Fix OutOfMemoryError caused by metrics system in Azure File
System. (Shanyu Zhao via cnauroth)
- HADOOP-10826. Iteration on KeyProviderFactory.serviceLoader is
- thread-unsafe. (benoyantony via tucu)
+ HADOOP-10925. Compilation fails in native link0 function on Windows.
+ (cnauroth)
- HADOOP-10881. Clarify usage of encryption and encrypted encryption
- key in KeyProviderCryptoExtension. (wang)
+ OPTIMIZATIONS
- HADOOP-10920. site plugin couldn't parse hadoop-kms index.apt.vm.
- (Akira Ajisaka via wang)
+ HADOOP-7761. Improve the performance of raw comparisons. (todd)
- HADOOP-10925. Compilation fails in native link0 function on Windows.
- (cnauroth)
+ HADOOP-8589. ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
- HADOOP-10939. Fix TestKeyProviderFactory testcases to use default 128 bit
- length keys. (Arun Suresh via wang)
+ BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
- HADOOP-10862. Miscellaneous trivial corrections to KMS classes.
- (asuresh via tucu)
+ HADOOP-10734. Implement high-performance secure random number sources.
+ (Yi Liu via Colin Patrick McCabe)
- HADOOP-10967. Improve DefaultCryptoExtension#generateEncryptedKey
- performance. (hitliuyi via tucu)
+ HADOOP-10603. Crypto input and output streams implementing Hadoop stream
+ interfaces. (Yi Liu and Charles Lamb)
- OPTIMIZATIONS
+ HADOOP-10628. Javadoc and few code style improvement for Crypto
+ input and output streams. (Yi Liu via clamb)
- HADOOP-7761. Improve the performance of raw comparisons. (todd)
+ HADOOP-10632. Minor improvements to Crypto input and output streams.
+ (Yi Liu)
- HADOOP-8589. ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
+ HADOOP-10635. Add a method to CryptoCodec to generate SRNs for IV. (Yi Liu)
+
+ HADOOP-10653. Add a new constructor for CryptoInputStream that
+ receives current position of wrapped stream. (Yi Liu)
+
+ HADOOP-10662. NullPointerException in CryptoInputStream while wrapped
+ stream is not ByteBufferReadable. Add tests using normal stream. (Yi Liu)
+
+ HADOOP-10713. Refactor CryptoCodec#generateSecureRandom to take a byte[].
+ (wang via yliu)
+
+ HADOOP-10693. Implementation of AES-CTR CryptoCodec using JNI to OpenSSL.
+ (Yi Liu via cmccabe)
+
+ HADOOP-10803. Update OpensslCipher#getInstance to accept CipherSuite#name
+ format. (Yi Liu)
+
+ HADOOP-10735. Fall back AesCtrCryptoCodec implementation from OpenSSL to
+ JCE if non native support. (Yi Liu)
+
+ HADOOP-10870. Failed to load OpenSSL cipher error logs on systems with old
+ openssl versions (cmccabe)
+
+ HADOOP-10853. Refactor get instance of CryptoCodec and support create via
+ algorithm/mode/padding. (Yi Liu)
+
+ HADOOP-10919. Copy command should preserve raw.* namespace
+ extended attributes. (clamb)
+
+ HDFS-6873. Constants in CommandWithDestination should be static. (clamb)
+
+ HADOOP-10871. incorrect prototype in OpensslSecureRandom.c (cmccabe)
+
+ HADOOP-10886. CryptoCodec#getCodecclasses throws NPE when configurations not
+ loaded. (umamahesh)
Release 2.6.0 - UNRELEASED
@@ -448,6 +381,11 @@ Release 2.6.0 - UNRELEASED
NEW FEATURES
+ HADOOP-10433. Key Management Server based on KeyProvider API. (tucu)
+
+ HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
+ jlowe)
+
IMPROVEMENTS
HADOOP-10808. Remove unused native code for munlock. (cnauroth)
@@ -532,10 +470,91 @@ Release 2.6.0 - UNRELEASED
HADOOP-10975. org.apache.hadoop.util.DataChecksum should support calculating
checksums in native code (James Thomas via Colin Patrick McCabe)
+ HADOOP-10201. Add listing to KeyProvider API. (Larry McCay via omalley)
+
+ HADOOP-10177. Create CLI tools for managing keys. (Larry McCay via omalley)
+
+ HADOOP-10432. Refactor SSLFactory to expose static method to determine
+ HostnameVerifier. (tucu)
+
+ HADOOP-10429. KeyStores should have methods to generate the materials
+ themselves, KeyShell should use them. (tucu)
+
+ HADOOP-10427. KeyProvider implementations should be thread safe. (tucu)
+
+ HADOOP-10428. JavaKeyStoreProvider should accept keystore password via
+ configuration falling back to ENV VAR. (tucu)
+
+ HADOOP-10430. KeyProvider Metadata should have an optional description,
+ there should be a method to retrieve the metadata from all keys. (tucu)
+
+ HADOOP-10431. Change visibility of KeyStore.Options getter methods to
+ public. (tucu)
+
+ HADOOP-10534. KeyProvider getKeysMetadata should take a list of names
+ rather than returning all keys. (omalley)
+
+ HADOOP-10719. Add generateEncryptedKey and decryptEncryptedKey
+ methods to KeyProvider. (asuresh via tucu)
+
+ HADOOP-10817. ProxyUsers configuration should support configurable
+ prefixes. (tucu)
+
+ HADOOP-10881. Clarify usage of encryption and encrypted encryption
+ key in KeyProviderCryptoExtension. (wang)
+
+ HADOOP-10770. KMS add delegation token support. (tucu)
+
+ HADOOP-10698. KMS, add proxyuser support. (tucu)
+
OPTIMIZATIONS
HADOOP-10838. Byte array native checksumming. (James Thomas via todd)
+ HADOOP-10696. Add optional attributes to KeyProvider Options and Metadata.
+ (tucu)
+
+ HADOOP-10695. KMSClientProvider should respect a configurable timeout.
+ (yoderme via tucu)
+
+ HADOOP-10757. KeyProvider KeyVersion should provide the key name.
+ (asuresh via tucu)
+
+ HADOOP-10769. Create KeyProvider extension to handle delegation tokens.
+ (Arun Suresh via atm)
+
+ HADOOP-10812. Delegate KeyProviderExtension#toString to underlying
+ KeyProvider. (wang)
+
+ HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
+
+ HADOOP-10824. Refactor KMSACLs to avoid locking. (Benoy Antony via umamahesh)
+
+ HADOOP-10841. EncryptedKeyVersion should have a key name property.
+ (asuresh via tucu)
+
+ HADOOP-10842. CryptoExtension generateEncryptedKey method should
+ receive the key name. (asuresh via tucu)
+
+ HADOOP-10750. KMSKeyProviderCache should be in hadoop-common.
+ (asuresh via tucu)
+
+ HADOOP-10720. KMS: Implement generateEncryptedKey and decryptEncryptedKey
+ in the REST API. (asuresh via tucu)
+
+ HADOOP-10891. Add EncryptedKeyVersion factory method to
+ KeyProviderCryptoExtension. (wang)
+
+ HADOOP-10756. KMS audit log should consolidate successful similar requests.
+ (asuresh via tucu)
+
+ HADOOP-10793. KeyShell args should use single-dash style. (wang)
+
+ HADOOP-10936. Change default KeyProvider bitlength to 128. (wang)
+
+ HADOOP-10224. JavaKeyStoreProvider has to protect against corrupting
+ underlying store. (asuresh via tucu)
+
BUG FIXES
HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
@@ -571,11 +590,6 @@ Release 2.6.0 - UNRELEASED
HADOOP-10927. Fix CredentialShell help behavior and error codes.
(Josh Elser via wang)
- HADOOP-10937. Need to set version name correctly before decrypting EEK.
- (Arun Suresh via wang)
-
- HADOOP-10918. JMXJsonServlet fails when used within Tomcat. (tucu)
-
HADOOP-10933. FileBasedKeyStoresFactory Should use Configuration.getPassword
for SSL Passwords. (lmccay via tucu)
@@ -626,7 +640,50 @@ Release 2.6.0 - UNRELEASED
HADOOP-10968. hadoop native build fails to detect java_libarch on
ppc64le (Dinar Valeev via Colin Patrick McCabe)
-Release 2.5.0 - UNRELEASED
+ HADOOP-10141. Create KeyProvider API to separate encryption key storage
+ from the applications. (omalley)
+
+ HADOOP-10237. JavaKeyStoreProvider needs to set keystore permissions
+ correctly. (Larry McCay via omalley)
+
+ HADOOP-10244. TestKeyShell improperly tests the results of delete (Larry
+ McCay via omalley)
+
+ HADOOP-10583. bin/hadoop key throws NPE with no args and assorted other fixups. (clamb via tucu)
+
+ HADOOP-10586. KeyShell doesn't allow setting Options via CLI. (clamb via tucu)
+
+ HADOOP-10645. TestKMS fails because race condition writing acl files. (tucu)
+
+ HADOOP-10611. KMS, keyVersion name should not be assumed to be
+ keyName@versionNumber. (tucu)
+
+ HADOOP-10816. KeyShell returns -1 on error to the shell, should be 1.
+ (Mike Yoder via wang)
+
+ HADOOP-10826. Iteration on KeyProviderFactory.serviceLoader is
+ thread-unsafe. (benoyantony via tucu)
+
+ HADOOP-10920. site plugin couldn't parse hadoop-kms index.apt.vm.
+ (Akira Ajisaka via wang)
+
+ HADOOP-10937. Need to set version name correctly before decrypting EEK.
+ (Arun Suresh via wang)
+
+ HADOOP-10918. JMXJsonServlet fails when used within Tomcat. (tucu)
+
+ HADOOP-10939. Fix TestKeyProviderFactory testcases to use default 128 bit
+ length keys. (Arun Suresh via wang)
+
+ HADOOP-10862. Miscellaneous trivial corrections to KMS classes.
+ (asuresh via tucu)
+
+ HADOOP-10967. Improve DefaultCryptoExtension#generateEncryptedKey
+ performance. (hitliuyi via tucu)
+
+ HADOOP-10488. TestKeyProviderFactory fails randomly. (tucu)
+
+Release 2.5.0 - 2014-08-11
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt:r1594376-1619194
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1619018-1619607
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Thu Aug 21 21:55:57 2014
@@ -108,6 +108,11 @@
<Method name="driver" />
<Bug pattern="DM_EXIT" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.util.RunJar" />
+ <Method name="run" />
+ <Bug pattern="DM_EXIT" />
+ </Match>
<!--
We need to cast objects between old and new api objects
-->
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/pom.xml?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/pom.xml Thu Aug 21 21:55:57 2014
@@ -499,6 +499,10 @@
<snappy.lib></snappy.lib>
<snappy.include></snappy.include>
<require.snappy>false</require.snappy>
+ <openssl.prefix></openssl.prefix>
+ <openssl.lib></openssl.lib>
+ <openssl.include></openssl.include>
+ <require.openssl>false</require.openssl>
</properties>
<build>
<plugins>
@@ -548,6 +552,8 @@
<javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
+ <javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
+ <javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
<javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
<javahClassName>org.apache.hadoop.net.unix.DomainSocket</javahClassName>
<javahClassName>org.apache.hadoop.net.unix.DomainSocketWatcher</javahClassName>
@@ -568,7 +574,7 @@
<configuration>
<target>
<exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
- <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_BZIP2=${require.bzip2} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}"/>
+ <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_BZIP2=${require.bzip2} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include} -DREQUIRE_OPENSSL=${require.openssl} -DCUSTOM_OPENSSL_PREFIX=${openssl.prefix} -DCUSTOM_OPENSSL_LIB=${openssl.lib} -DCUSTOM_OPENSSL_INCLUDE=${openssl.include}"/>
</exec>
<exec executable="make" dir="${project.build.directory}/native" failonerror="true">
<arg line="VERBOSE=1"/>
@@ -612,6 +618,11 @@
<snappy.include></snappy.include>
<require.snappy>false</require.snappy>
<bundle.snappy.in.bin>true</bundle.snappy.in.bin>
+ <openssl.prefix></openssl.prefix>
+ <openssl.lib></openssl.lib>
+ <openssl.include></openssl.include>
+ <require.openssl>false</require.openssl>
+ <bundle.openssl.in.bin>true</bundle.openssl.in.bin>
</properties>
<build>
<plugins>
@@ -657,6 +668,8 @@
<javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
+ <javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
+ <javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
<javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
</javahClassNames>
<javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
@@ -701,6 +714,10 @@
<argument>/p:CustomSnappyLib=${snappy.lib}</argument>
<argument>/p:CustomSnappyInclude=${snappy.include}</argument>
<argument>/p:RequireSnappy=${require.snappy}</argument>
+ <argument>/p:CustomOpensslPrefix=${openssl.prefix}</argument>
+ <argument>/p:CustomOpensslLib=${openssl.lib}</argument>
+ <argument>/p:CustomOpensslInclude=${openssl.include}</argument>
+ <argument>/p:RequireOpenssl=${require.openssl}</argument>
</arguments>
</configuration>
</execution>
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/CMakeLists.txt (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/CMakeLists.txt Thu Aug 21 21:55:57 2014
@@ -145,6 +145,38 @@ else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_
ENDIF(REQUIRE_SNAPPY)
endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
+set_find_shared_library_version("1.0.0")
+SET(OPENSSL_NAME "crypto")
+IF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+ SET(OPENSSL_NAME "eay32")
+ENDIF()
+find_library(OPENSSL_LIBRARY
+ NAMES ${OPENSSL_NAME}
+ PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/lib
+ ${CUSTOM_OPENSSL_PREFIX}/lib64 ${CUSTOM_OPENSSL_LIB} NO_DEFAULT_PATH)
+find_library(OPENSSL_LIBRARY
+ NAMES ${OPENSSL_NAME})
+SET(CMAKE_FIND_LIBRARY_SUFFIXES STORED_CMAKE_FIND_LIBRARY_SUFFIXES)
+find_path(OPENSSL_INCLUDE_DIR
+ NAMES openssl/evp.h
+ PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/include
+ ${CUSTOM_OPENSSL_INCLUDE} NO_DEFAULT_PATH)
+find_path(OPENSSL_INCLUDE_DIR
+ NAMES openssl/evp.h)
+if (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+ GET_FILENAME_COMPONENT(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
+ SET(OPENSSL_SOURCE_FILES
+ "${D}/crypto/OpensslCipher.c"
+ "${D}/crypto/random/OpensslSecureRandom.c")
+else (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+ SET(OPENSSL_INCLUDE_DIR "")
+ SET(OPENSSL_SOURCE_FILES "")
+ IF(REQUIRE_OPENSSL)
+ MESSAGE(FATAL_ERROR "Required openssl library could not be found. OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_INCLUDE_DIR=${CUSTOM_OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
+ ENDIF(REQUIRE_OPENSSL)
+endif (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+
include_directories(
${GENERATED_JAVAH}
main/native/src
@@ -155,6 +187,7 @@ include_directories(
${ZLIB_INCLUDE_DIRS}
${BZIP2_INCLUDE_DIR}
${SNAPPY_INCLUDE_DIR}
+ ${OPENSSL_INCLUDE_DIR}
${D}/util
)
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
@@ -172,6 +205,7 @@ add_dual_library(hadoop
${D}/io/compress/lz4/lz4.c
${D}/io/compress/lz4/lz4hc.c
${SNAPPY_SOURCE_FILES}
+ ${OPENSSL_SOURCE_FILES}
${D}/io/compress/zlib/ZlibCompressor.c
${D}/io/compress/zlib/ZlibDecompressor.c
${BZIP2_SOURCE_FILES}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/config.h.cmake
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/config.h.cmake?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/config.h.cmake (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/config.h.cmake Thu Aug 21 21:55:57 2014
@@ -21,6 +21,7 @@
#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
#cmakedefine HADOOP_BZIP2_LIBRARY "@HADOOP_BZIP2_LIBRARY@"
#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
+#cmakedefine HADOOP_OPENSSL_LIBRARY "@HADOOP_OPENSSL_LIBRARY@"
#cmakedefine HAVE_SYNC_FILE_RANGE
#cmakedefine HAVE_POSIX_FADVISE
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd Thu Aug 21 21:55:57 2014
@@ -282,10 +282,12 @@ if not "%HADOOP_MAPRED_HOME%\%MAPRED_DIR
@rem
if defined HADOOP_CLASSPATH (
- if defined HADOOP_USER_CLASSPATH_FIRST (
- set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
- ) else (
- set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
+ if not defined HADOOP_USE_CLIENT_CLASSLOADER (
+ if defined HADOOP_USER_CLASSPATH_FIRST (
+ set CLASSPATH=%HADOOP_CLASSPATH%;%CLASSPATH%;
+ ) else (
+ set CLASSPATH=%CLASSPATH%;%HADOOP_CLASSPATH%;
+ )
)
)
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh Thu Aug 21 21:55:57 2014
@@ -450,7 +450,8 @@ function hadoop_add_to_classpath_mapred
function hadoop_add_to_classpath_userpath
{
# Add the user-specified HADOOP_CLASSPATH to the
- # official CLASSPATH env var.
+ # official CLASSPATH env var if HADOOP_USE_CLIENT_CLASSLOADER
+ # is not set.
# Add it first or last depending on if user has
# set env-var HADOOP_USER_CLASSPATH_FIRST
# we'll also dedupe it, because we're cool like that.
@@ -469,14 +470,16 @@ function hadoop_add_to_classpath_userpat
done
let j=c-1
- if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
- for ((i=j; i>=0; i--)); do
- hadoop_add_classpath "${array[$i]}" before
- done
- else
- for ((i=0; i<=j; i++)); do
- hadoop_add_classpath "${array[$i]}" after
- done
+ if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
+ if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
+ for ((i=j; i>=0; i--)); do
+ hadoop_add_classpath "${array[$i]}" before
+ done
+ else
+ for ((i=0; i<=j; i++)); do
+ hadoop_add_classpath "${array[$i]}" after
+ done
+ fi
fi
fi
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/bin/hadoop.cmd Thu Aug 21 21:55:57 2014
@@ -29,6 +29,26 @@
@rem by doing
@rem export HADOOP_USER_CLASSPATH_FIRST=true
@rem
+@rem HADOOP_USE_CLIENT_CLASSLOADER When defined, HADOOP_CLASSPATH and the
+@rem jar as the hadoop jar argument are
+@rem handled by a separate isolated client
+@rem classloader. If it is set,
+@rem HADOOP_USER_CLASSPATH_FIRST is
+@rem ignored. Can be defined by doing
+@rem export HADOOP_USE_CLIENT_CLASSLOADER=true
+@rem
+@rem HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES
+@rem When defined, it overrides the default
+@rem definition of system classes for the
+@rem client classloader when
+@rem HADOOP_USE_CLIENT_CLASSLOADER is
+@rem enabled. Names ending in '.' (period)
+@rem are treated as package names, and names
+@rem starting with a '-' are treated as
+@rem negative matches. For example,
+@rem export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
+
+@rem
@rem HADOOP_HEAPSIZE The maximum amount of heap to use, in MB.
@rem Default is 1000.
@rem
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh Thu Aug 21 21:55:57 2014
@@ -111,6 +111,17 @@ esac
# Should HADOOP_USER_CLASSPATH be first in the official CLASSPATH?
# export HADOOP_USER_CLASSPATH_FIRST="yes"
+# If HADOOP_USE_CLIENT_CLASSLOADER is set, HADOOP_CLASSPATH along with the main
+# jar are handled by a separate isolated client classloader. If it is set,
+# HADOOP_USER_CLASSPATH_FIRST is ignored. Can be defined by doing
+# export HADOOP_USE_CLIENT_CLASSLOADER=true
+
+# HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES overrides the default definition of
+# system classes for the client classloader when HADOOP_USE_CLIENT_CLASSLOADER
+# is enabled. Names ending in '.' (period) are treated as package names, and
+# names starting with a '-' are treated as negative matches. For example,
+# export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
+
###
# Options for remote shell connectivity
###
Propchange: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1619018-1619607
Merged /hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/docs:r1594376-1619194
Propchange: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1619018-1619607
Merged /hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java:r1594376-1619194
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Thu Aug 21 21:55:57 2014
@@ -283,5 +283,4 @@ public class CommonConfigurationKeys ext
public static final String NFS_EXPORTS_ALLOWED_HOSTS_SEPARATOR = ";";
public static final String NFS_EXPORTS_ALLOWED_HOSTS_KEY = "nfs.exports.allowed.hosts";
public static final String NFS_EXPORTS_ALLOWED_HOSTS_KEY_DEFAULT = "* rw";
-
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Thu Aug 21 21:55:57 2014
@@ -288,6 +288,21 @@ public class CommonConfigurationKeysPubl
/** Class to override Sasl Properties for a connection */
public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
"hadoop.security.saslproperties.resolver.class";
+ public static final String HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX =
+ "hadoop.security.crypto.codec.classes";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY =
+ "hadoop.security.crypto.cipher.suite";
+ public static final String HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT =
+ "AES/CTR/NoPadding";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY =
+ "hadoop.security.crypto.jce.provider";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY =
+ "hadoop.security.crypto.buffer.size";
+ /** Default value for HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY */
+ public static final int HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT = 8192;
/** Class to override Impersonation provider */
public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
"hadoop.security.impersonation.provider.class";
@@ -318,5 +333,20 @@ public class CommonConfigurationKeysPubl
"hadoop.security.kms.client.encrypted.key.cache.expiry";
/** Default value for KMS_CLIENT_ENC_KEY_CACHE_EXPIRY (12 hrs)*/
public static final int KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_DEFAULT = 43200000;
+
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY =
+ "hadoop.security.java.secure.random.algorithm";
+ /** Default value for HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY */
+ public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT =
+ "SHA1PRNG";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY =
+ "hadoop.security.secure.random.impl";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY =
+ "hadoop.security.random.device.file.path";
+ public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT =
+ "/dev/urandom";
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java Thu Aug 21 21:55:57 2014
@@ -102,7 +102,7 @@ public class FSDataOutputStream extends
}
/**
- * Get a reference to the wrapped output stream. Used by unit tests.
+ * Get a reference to the wrapped output stream.
*
* @return the underlying output stream
*/
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Thu Aug 21 21:55:57 2014
@@ -58,6 +58,17 @@ abstract class CommandWithDestination ex
private boolean writeChecksum = true;
/**
+ * The name of the raw xattr namespace. It would be nice to use
+ * XAttr.RAW.name() but we can't reference the hadoop-hdfs project.
+ */
+ private static final String RAW = "raw.";
+
+ /**
+ * The name of the reserved raw directory.
+ */
+ private static final String RESERVED_RAW = "/.reserved/raw";
+
+ /**
*
* This method is used to enable the force(-f) option while copying the files.
*
@@ -231,7 +242,7 @@ abstract class CommandWithDestination ex
/**
* Called with a source and target destination pair
* @param src for the operation
- * @param target for the operation
+ * @param dst for the operation
* @throws IOException if anything goes wrong
*/
protected void processPath(PathData src, PathData dst) throws IOException {
@@ -253,6 +264,8 @@ abstract class CommandWithDestination ex
// modify dst as we descend to append the basename of the
// current directory being processed
dst = getTargetPath(src);
+ final boolean preserveRawXattrs =
+ checkPathsForReservedRaw(src.path, dst.path);
if (dst.exists) {
if (!dst.stat.isDirectory()) {
throw new PathIsNotDirectoryException(dst.toString());
@@ -268,7 +281,7 @@ abstract class CommandWithDestination ex
}
super.recursePath(src);
if (dst.stat.isDirectory()) {
- preserveAttributes(src, dst);
+ preserveAttributes(src, dst, preserveRawXattrs);
}
} finally {
dst = savedDst;
@@ -295,19 +308,61 @@ abstract class CommandWithDestination ex
* @param target where to copy the item
* @throws IOException if copy fails
*/
- protected void copyFileToTarget(PathData src, PathData target) throws IOException {
+ protected void copyFileToTarget(PathData src, PathData target)
+ throws IOException {
+ final boolean preserveRawXattrs =
+ checkPathsForReservedRaw(src.path, target.path);
src.fs.setVerifyChecksum(verifyChecksum);
InputStream in = null;
try {
in = src.fs.open(src.path);
copyStreamToTarget(in, target);
- preserveAttributes(src, target);
+ preserveAttributes(src, target, preserveRawXattrs);
} finally {
IOUtils.closeStream(in);
}
}
/**
+ * Check the source and target paths to ensure that they are either both in
+ * /.reserved/raw or neither in /.reserved/raw. If neither src nor target are
+ * in /.reserved/raw, then return false, indicating not to preserve raw.*
+ * xattrs. If both src/target are in /.reserved/raw, then return true,
+ * indicating raw.* xattrs should be preserved. If only one of src/target is
+ * in /.reserved/raw then throw an exception.
+ *
+ * @param src The source path to check. This should be a fully-qualified
+ * path, not relative.
+ * @param target The target path to check. This should be a fully-qualified
+ * path, not relative.
+ * @return true if raw.* xattrs should be preserved.
+ * @throws PathOperationException if only one of src/target is in
+ * /.reserved/raw.
+ */
+ private boolean checkPathsForReservedRaw(Path src, Path target)
+ throws PathOperationException {
+ final boolean srcIsRR = Path.getPathWithoutSchemeAndAuthority(src).
+ toString().startsWith(RESERVED_RAW);
+ final boolean dstIsRR = Path.getPathWithoutSchemeAndAuthority(target).
+ toString().startsWith(RESERVED_RAW);
+ boolean preserveRawXattrs = false;
+ if (srcIsRR && !dstIsRR) {
+ final String s = "' copy from '" + RESERVED_RAW + "' to non '" +
+ RESERVED_RAW + "'. Either both source and target must be in '" +
+ RESERVED_RAW + "' or neither.";
+ throw new PathOperationException("'" + src.toString() + s);
+ } else if (!srcIsRR && dstIsRR) {
+ final String s = "' copy from non '" + RESERVED_RAW +"' to '" +
+ RESERVED_RAW + "'. Either both source and target must be in '" +
+ RESERVED_RAW + "' or neither.";
+ throw new PathOperationException("'" + dst.toString() + s);
+ } else if (srcIsRR && dstIsRR) {
+ preserveRawXattrs = true;
+ }
+ return preserveRawXattrs;
+ }
+
+ /**
* Copies the stream contents to a temporary file. If the copy is
* successful, the temporary file will be renamed to the real path,
* else the temporary file will be deleted.
@@ -337,9 +392,11 @@ abstract class CommandWithDestination ex
* attribute to preserve.
* @param src source to preserve
* @param target where to preserve attributes
+ * @param preserveRawXAttrs true if raw.* xattrs should be preserved
* @throws IOException if fails to preserve attributes
*/
- protected void preserveAttributes(PathData src, PathData target)
+ protected void preserveAttributes(PathData src, PathData target,
+ boolean preserveRawXAttrs)
throws IOException {
if (shouldPreserve(FileAttribute.TIMESTAMPS)) {
target.fs.setTimes(
@@ -369,13 +426,17 @@ abstract class CommandWithDestination ex
target.fs.setAcl(target.path, srcFullEntries);
}
}
- if (shouldPreserve(FileAttribute.XATTR)) {
+ final boolean preserveXAttrs = shouldPreserve(FileAttribute.XATTR);
+ if (preserveXAttrs || preserveRawXAttrs) {
Map<String, byte[]> srcXAttrs = src.fs.getXAttrs(src.path);
if (srcXAttrs != null) {
Iterator<Entry<String, byte[]>> iter = srcXAttrs.entrySet().iterator();
while (iter.hasNext()) {
Entry<String, byte[]> entry = iter.next();
- target.fs.setXAttr(target.path, entry.getKey(), entry.getValue());
+ final String xattrName = entry.getKey();
+ if (xattrName.startsWith(RAW) || preserveXAttrs) {
+ target.fs.setXAttr(target.path, entry.getKey(), entry.getValue());
+ }
}
}
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Thu Aug 21 21:55:57 2014
@@ -143,7 +143,11 @@ class CopyCommands {
"timestamps, ownership, permission. If -pa is specified, " +
"then preserves permission also because ACL is a super-set of " +
"permission. Passing -f overwrites the destination if it " +
- "already exists.\n";
+ "already exists. raw namespace extended attributes are preserved " +
+ "if (1) they are supported (HDFS only) and, (2) all of the source and " +
+ "target pathnames are in the /.reserved/raw hierarchy. raw namespace " +
+ "xattr preservation is determined solely by the presence (or absence) " +
+ "of the /.reserved/raw prefix and not by the -p option.\n";
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java Thu Aug 21 21:55:57 2014
@@ -78,6 +78,11 @@ public class NativeCodeLoader {
* Returns true only if this build was compiled with support for snappy.
*/
public static native boolean buildSupportsSnappy();
+
+ /**
+ * Returns true only if this build was compiled with support for openssl.
+ */
+ public static native boolean buildSupportsOpenssl();
public static native String getLibraryName();
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java Thu Aug 21 21:55:57 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.util;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.OpensslCipher;
import org.apache.hadoop.io.compress.Lz4Codec;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
@@ -60,6 +61,8 @@ public class NativeLibraryChecker {
// lz4 is linked within libhadoop
boolean lz4Loaded = nativeHadoopLoaded;
boolean bzip2Loaded = Bzip2Factory.isNativeBzip2Loaded(conf);
+ boolean openSslLoaded = false;
+ String openSslDetail = "";
String hadoopLibraryName = "";
String zlibLibraryName = "";
String snappyLibraryName = "";
@@ -76,6 +79,13 @@ public class NativeLibraryChecker {
if (snappyLoaded && NativeCodeLoader.buildSupportsSnappy()) {
snappyLibraryName = SnappyCodec.getLibraryName();
}
+ if (OpensslCipher.getLoadingFailureReason() != null) {
+ openSslDetail = OpensslCipher.getLoadingFailureReason();
+ openSslLoaded = false;
+ } else {
+ openSslDetail = OpensslCipher.getLibraryName();
+ openSslLoaded = true;
+ }
if (lz4Loaded) {
lz4LibraryName = Lz4Codec.getLibraryName();
}
@@ -84,11 +94,12 @@ public class NativeLibraryChecker {
}
}
System.out.println("Native library checking:");
- System.out.printf("hadoop: %b %s\n", nativeHadoopLoaded, hadoopLibraryName);
- System.out.printf("zlib: %b %s\n", zlibLoaded, zlibLibraryName);
- System.out.printf("snappy: %b %s\n", snappyLoaded, snappyLibraryName);
- System.out.printf("lz4: %b %s\n", lz4Loaded, lz4LibraryName);
- System.out.printf("bzip2: %b %s\n", bzip2Loaded, bzip2LibraryName);
+ System.out.printf("hadoop: %b %s\n", nativeHadoopLoaded, hadoopLibraryName);
+ System.out.printf("zlib: %b %s\n", zlibLoaded, zlibLibraryName);
+ System.out.printf("snappy: %b %s\n", snappyLoaded, snappyLibraryName);
+ System.out.printf("lz4: %b %s\n", lz4Loaded, lz4LibraryName);
+ System.out.printf("bzip2: %b %s\n", bzip2Loaded, bzip2LibraryName);
+ System.out.printf("openssl: %b %s\n", openSslLoaded, openSslDetail);
if ((!nativeHadoopLoaded) ||
(checkAll && !(zlibLoaded && snappyLoaded && lz4Loaded && bzip2Loaded))) {
// return 1 to indicated check failed
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java Thu Aug 21 21:55:57 2014
@@ -18,23 +18,25 @@
package org.apache.hadoop.util;
-import java.lang.reflect.Array;
-import java.lang.reflect.Method;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URL;
-import java.net.URLClassLoader;
+import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.File;
-import java.util.regex.Pattern;
-import java.util.Arrays;
+import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Enumeration;
-import java.util.jar.JarFile;
+import java.util.List;
import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
import java.util.jar.Manifest;
+import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -56,6 +58,21 @@ public class RunJar {
public static final int SHUTDOWN_HOOK_PRIORITY = 10;
/**
+ * Environment key for using the client classloader.
+ */
+ public static final String HADOOP_USE_CLIENT_CLASSLOADER =
+ "HADOOP_USE_CLIENT_CLASSLOADER";
+ /**
+ * Environment key for the (user-provided) hadoop classpath.
+ */
+ public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
+ /**
+ * Environment key for the system classes.
+ */
+ public static final String HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES =
+ "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
+
+ /**
* Unpack a jar file into a directory.
*
* This version unpacks all files inside the jar regardless of filename.
@@ -116,6 +133,10 @@ public class RunJar {
/** Run a Hadoop job jar. If the main class is not in the jar's manifest,
* then it must be provided on the command line. */
public static void main(String[] args) throws Throwable {
+ new RunJar().run(args);
+ }
+
+ public void run(String[] args) throws Throwable {
String usage = "RunJar jarFile [mainClass] args...";
if (args.length < 1) {
@@ -187,19 +208,7 @@ public class RunJar {
unJar(file, workDir);
- ArrayList<URL> classPath = new ArrayList<URL>();
- classPath.add(new File(workDir+"/").toURI().toURL());
- classPath.add(file.toURI().toURL());
- classPath.add(new File(workDir, "classes/").toURI().toURL());
- File[] libs = new File(workDir, "lib").listFiles();
- if (libs != null) {
- for (int i = 0; i < libs.length; i++) {
- classPath.add(libs[i].toURI().toURL());
- }
- }
-
- ClassLoader loader =
- new URLClassLoader(classPath.toArray(new URL[0]));
+ ClassLoader loader = createClassLoader(file, workDir);
Thread.currentThread().setContextClassLoader(loader);
Class<?> mainClass = Class.forName(mainClassName, true, loader);
@@ -214,5 +223,65 @@ public class RunJar {
throw e.getTargetException();
}
}
-
+
+ /**
+ * Creates a classloader based on the environment that was specified by the
+ * user. If HADOOP_USE_CLIENT_CLASSLOADER is specified, it creates an
+ * application classloader that provides the isolation of the user class space
+ * from the hadoop classes and their dependencies. It forms a class space for
+ * the user jar as well as the HADOOP_CLASSPATH. Otherwise, it creates a
+ * classloader that simply adds the user jar to the classpath.
+ */
+ private ClassLoader createClassLoader(File file, final File workDir)
+ throws MalformedURLException {
+ ClassLoader loader;
+ // see if the client classloader is enabled
+ if (useClientClassLoader()) {
+ StringBuilder sb = new StringBuilder();
+ sb.append(workDir+"/").
+ append(File.pathSeparator).append(file).
+ append(File.pathSeparator).append(workDir+"/classes/").
+ append(File.pathSeparator).append(workDir+"/lib/*");
+ // HADOOP_CLASSPATH is added to the client classpath
+ String hadoopClasspath = getHadoopClasspath();
+ if (hadoopClasspath != null && !hadoopClasspath.isEmpty()) {
+ sb.append(File.pathSeparator).append(hadoopClasspath);
+ }
+ String clientClasspath = sb.toString();
+ // get the system classes
+ String systemClasses = getSystemClasses();
+ List<String> systemClassesList = systemClasses == null ?
+ null :
+ Arrays.asList(StringUtils.getTrimmedStrings(systemClasses));
+ // create an application classloader that isolates the user classes
+ loader = new ApplicationClassLoader(clientClasspath,
+ getClass().getClassLoader(), systemClassesList);
+ } else {
+ List<URL> classPath = new ArrayList<URL>();
+ classPath.add(new File(workDir+"/").toURI().toURL());
+ classPath.add(file.toURI().toURL());
+ classPath.add(new File(workDir, "classes/").toURI().toURL());
+ File[] libs = new File(workDir, "lib").listFiles();
+ if (libs != null) {
+ for (int i = 0; i < libs.length; i++) {
+ classPath.add(libs[i].toURI().toURL());
+ }
+ }
+ // create a normal parent-delegating classloader
+ loader = new URLClassLoader(classPath.toArray(new URL[0]));
+ }
+ return loader;
+ }
+
+ boolean useClientClassLoader() {
+ return Boolean.parseBoolean(System.getenv(HADOOP_USE_CLIENT_CLASSLOADER));
+ }
+
+ String getHadoopClasspath() {
+ return System.getenv(HADOOP_CLASSPATH);
+ }
+
+ String getSystemClasses() {
+ return System.getenv(HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES);
+ }
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c Thu Aug 21 21:55:57 2014
@@ -39,6 +39,16 @@ JNIEXPORT jboolean JNICALL Java_org_apac
#endif
}
+JNIEXPORT jboolean JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_buildSupportsOpenssl
+ (JNIEnv *env, jclass clazz)
+{
+#ifdef HADOOP_OPENSSL_LIBRARY
+ return JNI_TRUE;
+#else
+ return JNI_FALSE;
+#endif
+}
+
JNIEXPORT jstring JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_getLibraryName
(JNIEnv *env, jclass clazz)
{
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Thu Aug 21 21:55:57 2014
@@ -1445,6 +1445,74 @@ for ldap providers in the same way as ab
true.
</description>
</property>
+
+<property>
+ <name>hadoop.security.crypto.codec.classes.EXAMPLECIPHERSUITE</name>
+ <value></value>
+ <description>
+ The prefix for a given crypto codec; contains a comma-separated
+ list of implementation classes for that codec (e.g. EXAMPLECIPHERSUITE).
+ The first implementation will be used if available, others are fallbacks.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.crypto.codec.classes.aes.ctr.nopadding</name>
+ <value>org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec,org.apache.hadoop.crypto.JceAesCtrCryptoCodec</value>
+ <description>
+ Comma-separated list of crypto codec implementations for AES/CTR/NoPadding.
+ The first implementation will be used if available, others are fallbacks.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.crypto.cipher.suite</name>
+ <value>AES/CTR/NoPadding</value>
+ <description>
+ Cipher suite for crypto codec.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.crypto.jce.provider</name>
+ <value></value>
+ <description>
+ The JCE provider name used in CryptoCodec.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.crypto.buffer.size</name>
+ <value>8192</value>
+ <description>
+ The buffer size used by CryptoInputStream and CryptoOutputStream.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.java.secure.random.algorithm</name>
+ <value>SHA1PRNG</value>
+ <description>
+ The Java secure random algorithm (e.g. SHA1PRNG).
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.secure.random.impl</name>
+ <value></value>
+ <description>
+ Implementation of secure random.
+ </description>
+</property>
+
+<property>
+ <name>hadoop.security.random.device.file.path</name>
+ <value>/dev/urandom</value>
+ <description>
+ Path to the OS secure random device file (e.g. /dev/urandom).
+ </description>
+</property>
+
<property>
<name>fs.har.impl.disable.cache</name>
<value>true</value>
@@ -1483,4 +1551,5 @@ for ldap providers in the same way as ab
key will be dropped. Default = 12hrs
</description>
</property>
+
</configuration>
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm Thu Aug 21 21:55:57 2014
@@ -168,15 +168,22 @@ cp
Copy files from source to destination. This command allows multiple sources
as well in which case the destination must be a directory.
+ 'raw.*' namespace extended attributes are preserved if (1) the source and
+ destination filesystems support them (HDFS only), and (2) all source and
+ destination pathnames are in the /.reserved/raw hierarchy. Determination of
+ whether raw.* namespace xattrs are preserved is independent of the
+ -p (preserve) flag.
+
Options:
* The -f option will overwrite the destination if it already exists.
- * The -p option will preserve file attributes [topx] (timestamps,
+ * The -p option will preserve file attributes [topx] (timestamps,
ownership, permission, ACL, XAttr). If -p is specified with no <arg>,
then preserves timestamps, ownership, permission. If -pa is specified,
then preserves permission also because ACL is a super-set of
- permission.
+ permission. Determination of whether raw namespace extended attributes
+ are preserved is independent of the -p flag.
Example:
Propchange: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/core:r1594376-1619194
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1619018-1619607
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java Thu Aug 21 21:55:57 2014
@@ -22,6 +22,7 @@ import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.crypto.OpensslCipher;
import org.apache.hadoop.io.compress.Lz4Codec;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
@@ -54,6 +55,9 @@ public class TestNativeCodeLoader {
if (NativeCodeLoader.buildSupportsSnappy()) {
assertFalse(SnappyCodec.getLibraryName().isEmpty());
}
+ if (NativeCodeLoader.buildSupportsOpenssl()) {
+ assertFalse(OpensslCipher.getLibraryName().isEmpty());
+ }
assertFalse(Lz4Codec.getLibraryName().isEmpty());
LOG.info("TestNativeCodeLoader: libhadoop.so is loaded.");
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java Thu Aug 21 21:55:57 2014
@@ -17,23 +17,30 @@
*/
package org.apache.hadoop.util;
-import junit.framework.TestCase;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.util.jar.JarOutputStream;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import org.apache.hadoop.fs.FileUtil;
public class TestRunJar extends TestCase {
private File TEST_ROOT_DIR;
private static final String TEST_JAR_NAME="test-runjar.jar";
+ private static final String TEST_JAR_2_NAME = "test-runjar2.jar";
@Override
@Before
@@ -107,4 +114,59 @@ public class TestRunJar extends TestCase
new File(unjarDir, "foobaz.txt").exists());
}
+
+ /**
+ * Tests the client classloader to verify the main class and its dependent
+ * class are loaded correctly by the application classloader, and others are
+ * loaded by the system classloader.
+ */
+ @Test
+ public void testClientClassLoader() throws Throwable {
+ RunJar runJar = spy(new RunJar());
+ // enable the client classloader
+ when(runJar.useClientClassLoader()).thenReturn(true);
+ // set the system classes and blacklist the test main class and the test
+ // third class so they can be loaded by the application classloader
+ String mainCls = ClassLoaderCheckMain.class.getName();
+ String thirdCls = ClassLoaderCheckThird.class.getName();
+ String systemClasses = "-" + mainCls + "," +
+ "-" + thirdCls + "," +
+ ApplicationClassLoader.DEFAULT_SYSTEM_CLASSES;
+ when(runJar.getSystemClasses()).thenReturn(systemClasses);
+
+ // create the test jar
+ File testJar = makeClassLoaderTestJar(mainCls, thirdCls);
+ // form the args
+ String[] args = new String[3];
+ args[0] = testJar.getAbsolutePath();
+ args[1] = mainCls;
+
+ // run RunJar
+ runJar.run(args);
+ // it should not throw an exception
+ }
+
+ private File makeClassLoaderTestJar(String... clsNames) throws IOException {
+ File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_2_NAME);
+ JarOutputStream jstream =
+ new JarOutputStream(new FileOutputStream(jarFile));
+ for (String clsName: clsNames) {
+ String name = clsName.replace('.', '/') + ".class";
+ InputStream entryInputStream = this.getClass().getResourceAsStream(
+ "/" + name);
+ ZipEntry entry = new ZipEntry(name);
+ jstream.putNextEntry(entry);
+ BufferedInputStream bufInputStream = new BufferedInputStream(
+ entryInputStream, 2048);
+ int count;
+ byte[] data = new byte[2048];
+ while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+ jstream.write(data, 0, count);
+ }
+ jstream.closeEntry();
+ }
+ jstream.close();
+
+ return jarFile;
+ }
}
\ No newline at end of file
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1619608&r1=1619607&r2=1619608&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Thu Aug 21 21:55:57 2014
@@ -324,7 +324,23 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^\s*permission. Passing -f overwrites the destination if it already exists.( )*</expected-output>
+ <expected-output>^( |\t)*permission. Passing -f overwrites the destination if it already exists. raw( )*</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^( |\t)*namespace extended attributes are preserved if \(1\) they are supported \(HDFS( )*</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^( |\t)*only\) and, \(2\) all of the source and target pathnames are in the \/\.reserved\/raw( )*</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^( |\t)*hierarchy. raw namespace xattr preservation is determined solely by the presence( )*</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^\s*\(or absence\) of the \/\.reserved\/raw prefix and not by the -p option.( )*</expected-output>
</comparator>
</comparators>
</test>