You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by to...@apache.org on 2014/07/23 03:47:33 UTC
svn commit: r1612742 [1/3] - in
/hadoop/common/branches/MR-2841/hadoop-common-project: hadoop-auth/
hadoop-common/ hadoop-common/src/main/bin/ hadoop-common/src/main/java/
hadoop-common/src/main/java/org/apache/hadoop/crypto/key/
hadoop-common/src/main...
Author: todd
Date: Wed Jul 23 01:47:28 2014
New Revision: 1612742
URL: http://svn.apache.org/r1612742
Log:
Merge trunk into branch
Added:
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/CachingKeyProvider.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/CachingKeyProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Timer.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Timer.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestCachingKeyProvider.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestCachingKeyProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/FakeTimer.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/FakeTimer.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/EagerKeyGeneratorKeyProviderCryptoExtension.java
- copied unchanged from r1612740, hadoop/common/trunk/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/EagerKeyGeneratorKeyProviderCryptoExtension.java
Removed:
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSCacheKeyProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSCacheKeyProvider.java
Modified:
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-auth/pom.xml
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-auth/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-auth/pom.xml?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-auth/pom.xml (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-auth/pom.xml Wed Jul 23 01:47:28 2014
@@ -139,6 +139,17 @@
<attach>true</attach>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt Wed Jul 23 01:47:28 2014
@@ -36,10 +36,6 @@ Trunk (Unreleased)
HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
- HADOOP-7664. Remove warmings when overriding final parameter configuration
- if the override value is same as the final parameter value.
- (Ravi Prakash via suresh)
-
HADOOP-8078. Add capability to turn on security in unit tests. (Jaimin
Jetly via jitendra)
@@ -162,9 +158,6 @@ Trunk (Unreleased)
HADOOP-10485. Remove dead classes in hadoop-streaming. (wheat9)
- HADOOP-10607. Create API to separate credential/password storage from
- applications. (Larry McCay via omalley)
-
HADOOP-10696. Add optional attributes to KeyProvider Options and Metadata.
(tucu)
@@ -182,6 +175,20 @@ Trunk (Unreleased)
HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
+ HADOOP-10824. Refactor KMSACLs to avoid locking. (Benoy Antony via umamahesh)
+
+ HADOOP-10841. EncryptedKeyVersion should have a key name property.
+ (asuresh via tucu)
+
+ HADOOP-10842. CryptoExtension generateEncryptedKey method should
+ receive the key name. (asuresh via tucu)
+
+ HADOOP-10750. KMSKeyProviderCache should be in hadoop-common.
+ (asuresh via tucu)
+
+ HADOOP-10720. KMS: Implement generateEncryptedKey and decryptEncryptedKey
+ in the REST API. (asuresh via tucu)
+
BUG FIXES
HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -383,6 +390,18 @@ Trunk (Unreleased)
HADOOP-10834. Typo in CredentialShell usage. (Benoy Antony via umamahesh)
+ HADOOP-10816. KeyShell returns -1 on error to the shell, should be 1.
+ (Mike Yoder via wang)
+
+ HADOOP-10840. Fix OutOfMemoryError caused by metrics system in Azure File
+ System. (Shanyu Zhao via cnauroth)
+
+ HADOOP-10826. Iteration on KeyProviderFactory.serviceLoader is
+ thread-unsafe. (benoyantony via tucu)
+
+ HADOOP-10881. Clarify usage of encryption and encrypted encryption
+ key in KeyProviderCryptoExtension. (wang)
+
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -401,6 +420,38 @@ Release 2.6.0 - UNRELEASED
HADOOP-10815. Implement Windows equivalent of mlock. (cnauroth)
+ HADOOP-7664. Remove warnings when overriding final parameter configuration
+ if the override value is same as the final parameter value.
+ (Ravi Prakash via suresh)
+
+ HADOOP-10673. Update rpc metrics when the call throws an exception. (Ming Ma
+ via jing9)
+
+ HADOOP-10845. Add common tests for ACLs in combination with viewfs.
+ (Stephen Chu via cnauroth)
+
+ HADOOP-10839. Add unregisterSource() to MetricsSystem API.
+ (Shanyu Zhao via cnauroth)
+
+ HADOOP-10607. Create an API to separate credentials/password storage
+ from applications (Larry McCay via omalley)
+
+ HADOOP-10732. Fix locking in credential update. (Ted Yu via omalley)
+
+ HADOOP-10733. Fix potential null dereference in CredShell. (Ted Yu via
+ omalley)
+
+ HADOOP-10610. Upgrade S3n s3.fs.buffer.dir to support multi directories.
+ (Ted Malaska via atm)
+
+ HADOOP-10817. ProxyUsers configuration should support configurable
+ prefixes. (tucu)
+
+ HADOOP-10755. Support negative caching of user-group mapping.
+ (Lei Xu via wang)
+
+ HADOOP-10855. Allow Text to be read with a known Length. (todd)
+
OPTIMIZATIONS
BUG FIXES
@@ -416,6 +467,19 @@ Release 2.6.0 - UNRELEASED
HADOOP-10810. Clean up native code compilation warnings. (cnauroth)
+ HADOOP-9921. daemon scripts should remove pid file on stop call after stop
+ or process is found not running ( vinayakumarb )
+
+ HADOOP-10591. Compression codecs must use pooled direct buffers or
+ deallocate direct buffers when stream is closed (cmccabe)
+
+ HADOOP-10857. Native Libraries Guide doesn't mention a dependency on
+ openssl-development package (ozawa via cmccabe)
+
+ HADOOP-10866. RawLocalFileSystem fails to read symlink targets via the stat
+ command when the format of the stat command uses non-curly quotes (yzhang
+ via cmccabe)
+
Release 2.5.0 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -738,6 +802,9 @@ Release 2.5.0 - UNRELEASED
HADOOP-10710. hadoop.auth cookie is not properly constructed according to
RFC2109. (Juan Yu via tucu)
+ HADOOP-10864. Tool documentation is broken. (Akira Ajisaka
+ via Arpit Agarwal)
+
Release 2.4.1 - 2014-06-23
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1610815-1612740
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh Wed Jul 23 01:47:28 2014
@@ -198,6 +198,7 @@ case $startStop in
else
echo no $command to stop
fi
+ rm -f $pid
else
echo no $command to stop
fi
Propchange: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1610815-1612740
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java Wed Jul 23 01:47:28 2014
@@ -21,52 +21,117 @@ package org.apache.hadoop.crypto.key;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.SecureRandom;
-
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
/**
- * A KeyProvider with Cytographic Extensions specifically for generating
- * Encrypted Keys as well as decrypting them
+ * A KeyProvider with Cryptographic Extensions specifically for generating
+ * and decrypting encrypted encryption keys.
*
*/
+@InterfaceAudience.Private
public class KeyProviderCryptoExtension extends
KeyProviderExtension<KeyProviderCryptoExtension.CryptoExtension> {
- protected static final String EEK = "EEK";
- protected static final String EK = "EK";
+ /**
+ * Designates an encrypted encryption key, or EEK.
+ */
+ public static final String EEK = "EEK";
+ /**
+ * Designates a decrypted encrypted encryption key, that is, an encryption key
+ * (EK).
+ */
+ public static final String EK = "EK";
/**
- * This is a holder class whose instance contains the keyVersionName, iv
- * used to generate the encrypted Key and the encrypted KeyVersion
+ * An encrypted encryption key (EEK) and related information. An EEK must be
+ * decrypted using the key's encryption key before it can be used.
*/
public static class EncryptedKeyVersion {
- private String keyVersionName;
- private byte[] iv;
- private KeyVersion encryptedKey;
+ private String encryptionKeyName;
+ private String encryptionKeyVersionName;
+ private byte[] encryptedKeyIv;
+ private KeyVersion encryptedKeyVersion;
- protected EncryptedKeyVersion(String keyVersionName, byte[] iv,
- KeyVersion encryptedKey) {
- this.keyVersionName = keyVersionName;
- this.iv = iv;
- this.encryptedKey = encryptedKey;
+ /**
+ * Create a new EncryptedKeyVersion.
+ *
+ * @param keyName Name of the encryption key used to
+ * encrypt the encrypted key.
+ * @param encryptionKeyVersionName Version name of the encryption key used
+ * to encrypt the encrypted key.
+ * @param encryptedKeyIv Initialization vector of the encrypted
+ * key. The IV of the encryption key used to
+ * encrypt the encrypted key is derived from
+ * this IV.
+ * @param encryptedKeyVersion The encrypted encryption key version.
+ */
+ protected EncryptedKeyVersion(String keyName,
+ String encryptionKeyVersionName, byte[] encryptedKeyIv,
+ KeyVersion encryptedKeyVersion) {
+ this.encryptionKeyName = keyName;
+ this.encryptionKeyVersionName = encryptionKeyVersionName;
+ this.encryptedKeyIv = encryptedKeyIv;
+ this.encryptedKeyVersion = encryptedKeyVersion;
+ }
+
+ /**
+ * @return Name of the encryption key used to encrypt the encrypted key.
+ */
+ public String getEncryptionKeyName() {
+ return encryptionKeyName;
}
- public String getKeyVersionName() {
- return keyVersionName;
+ /**
+ * @return Version name of the encryption key used to encrypt the encrypted
+ * key.
+ */
+ public String getEncryptionKeyVersionName() {
+ return encryptionKeyVersionName;
}
- public byte[] getIv() {
- return iv;
+ /**
+ * @return Initialization vector of the encrypted key. The IV of the
+ * encryption key used to encrypt the encrypted key is derived from this
+ * IV.
+ */
+ public byte[] getEncryptedKeyIv() {
+ return encryptedKeyIv;
}
- public KeyVersion getEncryptedKey() {
- return encryptedKey;
+ /**
+ * @return The encrypted encryption key version.
+ */
+ public KeyVersion getEncryptedKeyVersion() {
+ return encryptedKeyVersion;
}
+ /**
+ * Derive the initialization vector (IV) for the encryption key from the IV
+ * of the encrypted key. This derived IV is used with the encryption key to
+ * decrypt the encrypted key.
+ * <p/>
+ * The alternative to this is using the same IV for both the encryption key
+ * and the encrypted key. Even a simple symmetric transformation like this
+ * improves security by avoiding IV re-use. IVs will also be fairly unique
+ * among different EEKs.
+ *
+ * @param encryptedKeyIV of the encrypted key (i.e. {@link
+ * #getEncryptedKeyIv()})
+ * @return IV for the encryption key
+ */
+ protected static byte[] deriveIV(byte[] encryptedKeyIV) {
+ byte[] rIv = new byte[encryptedKeyIV.length];
+ // Do a simple XOR transformation to flip all the bits
+ for (int i = 0; i < encryptedKeyIV.length; i++) {
+ rIv[i] = (byte) (encryptedKeyIV[i] ^ 0xff);
+ }
+ return rIv;
+ }
}
/**
@@ -76,16 +141,23 @@ public class KeyProviderCryptoExtension
public interface CryptoExtension extends KeyProviderExtension.Extension {
/**
+ * Calls to this method allows the underlying KeyProvider to warm-up any
+ * implementation specific caches used to store the Encrypted Keys.
+ * @param keyNames Array of Key Names
+ */
+ public void warmUpEncryptedKeys(String... keyNames)
+ throws IOException;
+
+ /**
* Generates a key material and encrypts it using the given key version name
* and initialization vector. The generated key material is of the same
- * length as the <code>KeyVersion</code> material and is encrypted using the
- * same cipher.
+ * length as the <code>KeyVersion</code> material of the latest key version
+ * of the key and is encrypted using the same cipher.
* <p/>
* NOTE: The generated key is not stored by the <code>KeyProvider</code>
*
- * @param encryptionKeyVersion
- * a KeyVersion object containing the keyVersion name and material
- * to encrypt.
+ * @param encryptionKeyName
+ * The latest KeyVersion of this key's material will be encrypted.
* @return EncryptedKeyVersion with the generated key material, the version
* name is 'EEK' (for Encrypted Encryption Key)
* @throws IOException
@@ -95,7 +167,7 @@ public class KeyProviderCryptoExtension
* cryptographic issue.
*/
public EncryptedKeyVersion generateEncryptedKey(
- KeyVersion encryptionKeyVersion) throws IOException,
+ String encryptionKeyName) throws IOException,
GeneralSecurityException;
/**
@@ -126,63 +198,88 @@ public class KeyProviderCryptoExtension
this.keyProvider = keyProvider;
}
- // the IV used to encrypt a EK typically will be the same IV used to
- // encrypt data with the EK. To avoid any chance of weakening the
- // encryption because the same IV is used, we simply XOR the IV thus we
- // are not using the same IV for 2 different encryptions (even if they
- // are done using different keys)
- private byte[] flipIV(byte[] iv) {
- byte[] rIv = new byte[iv.length];
- for (int i = 0; i < iv.length; i++) {
- rIv[i] = (byte) (iv[i] ^ 0xff);
- }
- return rIv;
- }
-
@Override
- public EncryptedKeyVersion generateEncryptedKey(KeyVersion keyVersion)
+ public EncryptedKeyVersion generateEncryptedKey(String encryptionKeyName)
throws IOException, GeneralSecurityException {
- KeyVersion keyVer =
- keyProvider.getKeyVersion(keyVersion.getVersionName());
- Preconditions.checkNotNull(keyVer, "KeyVersion name '%s' does not exist",
- keyVersion.getVersionName());
- byte[] newKey = new byte[keyVer.getMaterial().length];
- SecureRandom.getInstance("SHA1PRNG").nextBytes(newKey);
+ // Fetch the encryption key
+ KeyVersion encryptionKey = keyProvider.getCurrentKey(encryptionKeyName);
+ Preconditions.checkNotNull(encryptionKey,
+ "No KeyVersion exists for key '%s' ", encryptionKeyName);
+ // Generate random bytes for new key and IV
Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
- byte[] iv = SecureRandom.getSeed(cipher.getBlockSize());
- cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(keyVer.getMaterial(),
- "AES"), new IvParameterSpec(flipIV(iv)));
- byte[] ek = cipher.doFinal(newKey);
- return new EncryptedKeyVersion(keyVersion.getVersionName(), iv,
- new KeyVersion(keyVer.getName(), EEK, ek));
+ SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
+ final byte[] newKey = new byte[encryptionKey.getMaterial().length];
+ random.nextBytes(newKey);
+ final byte[] iv = random.generateSeed(cipher.getBlockSize());
+ // Encryption key IV is derived from new key's IV
+ final byte[] encryptionIV = EncryptedKeyVersion.deriveIV(iv);
+ // Encrypt the new key
+ cipher.init(Cipher.ENCRYPT_MODE,
+ new SecretKeySpec(encryptionKey.getMaterial(), "AES"),
+ new IvParameterSpec(encryptionIV));
+ final byte[] encryptedKey = cipher.doFinal(newKey);
+ return new EncryptedKeyVersion(encryptionKeyName,
+ encryptionKey.getVersionName(), iv,
+ new KeyVersion(encryptionKey.getName(), EEK, encryptedKey));
}
@Override
public KeyVersion decryptEncryptedKey(
EncryptedKeyVersion encryptedKeyVersion) throws IOException,
GeneralSecurityException {
- KeyVersion keyVer =
- keyProvider.getKeyVersion(encryptedKeyVersion.getKeyVersionName());
- Preconditions.checkNotNull(keyVer, "KeyVersion name '%s' does not exist",
- encryptedKeyVersion.getKeyVersionName());
- KeyVersion keyVersion = encryptedKeyVersion.getEncryptedKey();
+ // Fetch the encryption key material
+ final String encryptionKeyVersionName =
+ encryptedKeyVersion.getEncryptionKeyVersionName();
+ final KeyVersion encryptionKey =
+ keyProvider.getKeyVersion(encryptionKeyVersionName);
+ Preconditions.checkNotNull(encryptionKey,
+ "KeyVersion name '%s' does not exist", encryptionKeyVersionName);
+ final byte[] encryptionKeyMaterial = encryptionKey.getMaterial();
+ // Encryption key IV is determined from encrypted key's IV
+ final byte[] encryptionIV =
+ EncryptedKeyVersion.deriveIV(encryptedKeyVersion.getEncryptedKeyIv());
+ // Init the cipher with encryption key parameters
Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
cipher.init(Cipher.DECRYPT_MODE,
- new SecretKeySpec(keyVersion.getMaterial(), "AES"),
- new IvParameterSpec(flipIV(encryptedKeyVersion.getIv())));
- byte[] ek =
- cipher.doFinal(encryptedKeyVersion.getEncryptedKey().getMaterial());
- return new KeyVersion(keyVer.getName(), EK, ek);
+ new SecretKeySpec(encryptionKeyMaterial, "AES"),
+ new IvParameterSpec(encryptionIV));
+ // Decrypt the encrypted key
+ final KeyVersion encryptedKV =
+ encryptedKeyVersion.getEncryptedKeyVersion();
+ final byte[] decryptedKey = cipher.doFinal(encryptedKV.getMaterial());
+ return new KeyVersion(encryptionKey.getName(), EK, decryptedKey);
+ }
+
+ @Override
+ public void warmUpEncryptedKeys(String... keyNames)
+ throws IOException {
+ // NO-OP since the default version does not cache any keys
}
}
- private KeyProviderCryptoExtension(KeyProvider keyProvider,
+ /**
+ * This constructor is to be used by sub classes that provide
+ * delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
+ * @param keyProvider
+ * @param extension
+ */
+ protected KeyProviderCryptoExtension(KeyProvider keyProvider,
CryptoExtension extension) {
super(keyProvider, extension);
}
/**
+ * Notifies the Underlying CryptoExtension implementation to warm up any
+ * implementation specific caches for the specified KeyVersions
+ * @param keyNames Arrays of key Names
+ */
+ public void warmUpEncryptedKeys(String... keyNames)
+ throws IOException {
+ getExtension().warmUpEncryptedKeys(keyNames);
+ }
+
+ /**
* Generates a key material and encrypts it using the given key version name
* and initialization vector. The generated key material is of the same
* length as the <code>KeyVersion</code> material and is encrypted using the
@@ -190,18 +287,18 @@ public class KeyProviderCryptoExtension
* <p/>
* NOTE: The generated key is not stored by the <code>KeyProvider</code>
*
- * @param encryptionKey a KeyVersion object containing the keyVersion name and
- * material to encrypt.
+ * @param encryptionKeyName The latest KeyVersion of this key's material will
+ * be encrypted.
* @return EncryptedKeyVersion with the generated key material, the version
* name is 'EEK' (for Encrypted Encryption Key)
* @throws IOException thrown if the key material could not be generated
* @throws GeneralSecurityException thrown if the key material could not be
* encrypted because of a cryptographic issue.
*/
- public EncryptedKeyVersion generateEncryptedKey(KeyVersion encryptionKey)
+ public EncryptedKeyVersion generateEncryptedKey(String encryptionKeyName)
throws IOException,
GeneralSecurityException {
- return getExtension().generateEncryptedKey(encryptionKey);
+ return getExtension().generateEncryptedKey(encryptionKeyName);
}
/**
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java Wed Jul 23 01:47:28 2014
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
@@ -47,6 +48,15 @@ public abstract class KeyProviderFactory
private static final ServiceLoader<KeyProviderFactory> serviceLoader =
ServiceLoader.load(KeyProviderFactory.class);
+ // Iterate through the serviceLoader to avoid lazy loading.
+ // Lazy loading would require synchronization in concurrent use cases.
+ static {
+ Iterator<KeyProviderFactory> iterServices = serviceLoader.iterator();
+ while (iterServices.hasNext()) {
+ iterServices.next();
+ }
+ }
+
public static List<KeyProvider> getProviders(Configuration conf
) throws IOException {
List<KeyProvider> result = new ArrayList<KeyProvider>();
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Wed Jul 23 01:47:28 2014
@@ -57,6 +57,16 @@ public class KeyShell extends Configured
private boolean userSuppliedProvider = false;
+ /**
+ * Primary entry point for the KeyShell; called via main().
+ *
+ * @param args Command line arguments.
+ * @return 0 on success and 1 on failure. This value is passed back to
+ * the unix shell, so we must follow shell return code conventions:
+ * the return code is an unsigned character, and 0 means success, and
+ * small positive integers mean failure.
+ * @throws Exception
+ */
@Override
public int run(String[] args) throws Exception {
int exitCode = 0;
@@ -68,11 +78,11 @@ public class KeyShell extends Configured
if (command.validate()) {
command.execute();
} else {
- exitCode = -1;
+ exitCode = 1;
}
} catch (Exception e) {
e.printStackTrace(err);
- return -1;
+ return 1;
}
return exitCode;
}
@@ -86,8 +96,8 @@ public class KeyShell extends Configured
* % hadoop key list [-provider providerPath]
* % hadoop key delete keyName [--provider providerPath] [-i]
* </pre>
- * @param args
- * @return
+ * @param args Command line arguments.
+ * @return 0 on success, 1 on failure.
* @throws IOException
*/
private int init(String[] args) throws IOException {
@@ -105,7 +115,7 @@ public class KeyShell extends Configured
command = new CreateCommand(keyName, options);
if ("--help".equals(keyName)) {
printKeyShellUsage();
- return -1;
+ return 1;
}
} else if (args[i].equals("delete")) {
String keyName = "--help";
@@ -116,7 +126,7 @@ public class KeyShell extends Configured
command = new DeleteCommand(keyName);
if ("--help".equals(keyName)) {
printKeyShellUsage();
- return -1;
+ return 1;
}
} else if (args[i].equals("roll")) {
String keyName = "--help";
@@ -127,7 +137,7 @@ public class KeyShell extends Configured
command = new RollCommand(keyName);
if ("--help".equals(keyName)) {
printKeyShellUsage();
- return -1;
+ return 1;
}
} else if ("list".equals(args[i])) {
command = new ListCommand();
@@ -145,13 +155,13 @@ public class KeyShell extends Configured
out.println("\nAttributes must be in attribute=value form, " +
"or quoted\nlike \"attribute = value\"\n");
printKeyShellUsage();
- return -1;
+ return 1;
}
if (attributes.containsKey(attr)) {
out.println("\nEach attribute must correspond to only one value:\n" +
"atttribute \"" + attr + "\" was repeated\n" );
printKeyShellUsage();
- return -1;
+ return 1;
}
attributes.put(attr, val);
} else if ("--provider".equals(args[i]) && moreTokens) {
@@ -163,17 +173,17 @@ public class KeyShell extends Configured
interactive = true;
} else if ("--help".equals(args[i])) {
printKeyShellUsage();
- return -1;
+ return 1;
} else {
printKeyShellUsage();
ToolRunner.printGenericCommandUsage(System.err);
- return -1;
+ return 1;
}
}
if (command == null) {
printKeyShellUsage();
- return -1;
+ return 1;
}
if (!attributes.isEmpty()) {
@@ -491,10 +501,11 @@ public class KeyShell extends Configured
}
/**
- * Main program.
+ * main() entry point for the KeyShell. While strictly speaking the
+ * return is void, it will System.exit() with a return code: 0 is for
+ * success and 1 for failure.
*
- * @param args
- * Command line arguments
+ * @param args Command line arguments.
* @throws Exception
*/
public static void main(String[] args) throws Exception {
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java Wed Jul 23 01:47:28 2014
@@ -21,7 +21,9 @@ import org.apache.commons.codec.binary.B
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderFactory;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
@@ -33,6 +35,7 @@ import org.apache.http.client.utils.URIB
import org.codehaus.jackson.map.ObjectMapper;
import javax.net.ssl.HttpsURLConnection;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -40,6 +43,7 @@ import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
+import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
@@ -50,14 +54,22 @@ import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Queue;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
+
+import com.google.common.base.Preconditions;
/**
* KMS client <code>KeyProvider</code> implementation.
*/
@InterfaceAudience.Private
-public class KMSClientProvider extends KeyProvider {
+public class KMSClientProvider extends KeyProvider implements CryptoExtension {
public static final String SCHEME_NAME = "kms";
@@ -78,6 +90,73 @@ public class KMSClientProvider extends K
public static final String TIMEOUT_ATTR = CONFIG_PREFIX + "timeout";
public static final int DEFAULT_TIMEOUT = 60;
+ private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
+
+ private class EncryptedQueueRefiller implements
+ ValueQueue.QueueRefiller<EncryptedKeyVersion> {
+
+ @Override
+ public void fillQueueForKey(String keyName,
+ Queue<EncryptedKeyVersion> keyQueue, int numEKVs) throws IOException {
+ checkNotNull(keyName, "keyName");
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(KMSRESTConstants.EEK_OP, KMSRESTConstants.EEK_GENERATE);
+ params.put(KMSRESTConstants.EEK_NUM_KEYS, "" + numEKVs);
+ URL url = createURL(KMSRESTConstants.KEY_RESOURCE, keyName,
+ KMSRESTConstants.EEK_SUB_RESOURCE, params);
+ HttpURLConnection conn = createConnection(url, HTTP_GET);
+ conn.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON_MIME);
+ List response = call(conn, null,
+ HttpURLConnection.HTTP_OK, List.class);
+ List<EncryptedKeyVersion> ekvs =
+ parseJSONEncKeyVersion(keyName, response);
+ keyQueue.addAll(ekvs);
+ }
+ }
+
+ public static class KMSEncryptedKeyVersion extends EncryptedKeyVersion {
+ public KMSEncryptedKeyVersion(String keyName, String keyVersionName,
+ byte[] iv, String encryptedVersionName, byte[] keyMaterial) {
+ super(keyName, keyVersionName, iv, new KMSKeyVersion(null,
+ encryptedVersionName, keyMaterial));
+ }
+ }
+
+ @SuppressWarnings("rawtypes")
+ private static List<EncryptedKeyVersion>
+ parseJSONEncKeyVersion(String keyName, List valueList) {
+ List<EncryptedKeyVersion> ekvs = new LinkedList<EncryptedKeyVersion>();
+ if (!valueList.isEmpty()) {
+ for (Object values : valueList) {
+ Map valueMap = (Map) values;
+
+ String versionName = checkNotNull(
+ (String) valueMap.get(KMSRESTConstants.VERSION_NAME_FIELD),
+ KMSRESTConstants.VERSION_NAME_FIELD);
+
+ byte[] iv = Base64.decodeBase64(checkNotNull(
+ (String) valueMap.get(KMSRESTConstants.IV_FIELD),
+ KMSRESTConstants.IV_FIELD));
+
+ Map encValueMap = checkNotNull((Map)
+ valueMap.get(KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD),
+ KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD);
+
+ String encVersionName = checkNotNull((String)
+ encValueMap.get(KMSRESTConstants.VERSION_NAME_FIELD),
+ KMSRESTConstants.VERSION_NAME_FIELD);
+
+ byte[] encKeyMaterial = Base64.decodeBase64(checkNotNull((String)
+ encValueMap.get(KMSRESTConstants.MATERIAL_FIELD),
+ KMSRESTConstants.MATERIAL_FIELD));
+
+ ekvs.add(new KMSEncryptedKeyVersion(keyName, versionName, iv,
+ encVersionName, encKeyMaterial));
+ }
+ }
+ return ekvs;
+ }
+
private static KeyVersion parseJSONKeyVersion(Map valueMap) {
KeyVersion keyVersion = null;
if (!valueMap.isEmpty()) {
@@ -208,6 +287,28 @@ public class KMSClientProvider extends K
}
int timeout = conf.getInt(TIMEOUT_ATTR, DEFAULT_TIMEOUT);
configurator = new TimeoutConnConfigurator(timeout, sslFactory);
+ encKeyVersionQueue =
+ new ValueQueue<KeyProviderCryptoExtension.EncryptedKeyVersion>(
+ conf.getInt(
+ CommonConfigurationKeysPublic.KMS_CLIENT_ENC_KEY_CACHE_SIZE,
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_SIZE_DEFAULT),
+ conf.getFloat(
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK,
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK_DEFAULT),
+ conf.getInt(
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_MS,
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_DEFAULT),
+ conf.getInt(
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS,
+ CommonConfigurationKeysPublic.
+ KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS_DEFAULT),
+ new EncryptedQueueRefiller());
}
private String createServiceURL(URL url) throws IOException {
@@ -528,6 +629,54 @@ public class KMSClientProvider extends K
}
@Override
+ public EncryptedKeyVersion generateEncryptedKey(
+ String encryptionKeyName) throws IOException, GeneralSecurityException {
+ try {
+ return encKeyVersionQueue.getNext(encryptionKeyName);
+ } catch (ExecutionException e) {
+ if (e.getCause() instanceof SocketTimeoutException) {
+ throw (SocketTimeoutException)e.getCause();
+ }
+ throw new IOException(e);
+ }
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public KeyVersion decryptEncryptedKey(
+ EncryptedKeyVersion encryptedKeyVersion) throws IOException,
+ GeneralSecurityException {
+ checkNotNull(encryptedKeyVersion.getEncryptionKeyVersionName(),
+ "versionName");
+ checkNotNull(encryptedKeyVersion.getEncryptedKeyIv(), "iv");
+ Preconditions.checkArgument(
+ encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
+ .equals(KeyProviderCryptoExtension.EEK),
+ "encryptedKey version name must be '%s', is '%s'",
+ KeyProviderCryptoExtension.EK,
+ encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
+ );
+ checkNotNull(encryptedKeyVersion.getEncryptedKeyVersion(), "encryptedKey");
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(KMSRESTConstants.EEK_OP, KMSRESTConstants.EEK_DECRYPT);
+ Map<String, Object> jsonPayload = new HashMap<String, Object>();
+ jsonPayload.put(KMSRESTConstants.NAME_FIELD,
+ encryptedKeyVersion.getEncryptionKeyName());
+ jsonPayload.put(KMSRESTConstants.IV_FIELD, Base64.encodeBase64String(
+ encryptedKeyVersion.getEncryptedKeyIv()));
+ jsonPayload.put(KMSRESTConstants.MATERIAL_FIELD, Base64.encodeBase64String(
+ encryptedKeyVersion.getEncryptedKeyVersion().getMaterial()));
+ URL url = createURL(KMSRESTConstants.KEY_VERSION_RESOURCE,
+ encryptedKeyVersion.getEncryptionKeyVersionName(),
+ KMSRESTConstants.EEK_SUB_RESOURCE, params);
+ HttpURLConnection conn = createConnection(url, HTTP_POST);
+ conn.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON_MIME);
+ Map response =
+ call(conn, jsonPayload, HttpURLConnection.HTTP_OK, Map.class);
+ return parseJSONKeyVersion(response);
+ }
+
+ @Override
public List<KeyVersion> getKeyVersions(String name) throws IOException {
checkNotEmpty(name, "name");
URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name,
@@ -570,4 +719,14 @@ public class KMSClientProvider extends K
// the server should not keep in memory state on behalf of clients either.
}
+ @Override
+ public void warmUpEncryptedKeys(String... keyNames)
+ throws IOException {
+ try {
+ encKeyVersionQueue.initializeQueuesForKeys(keyNames);
+ } catch (ExecutionException e) {
+ throw new IOException(e);
+ }
+ }
+
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSRESTConstants.java Wed Jul 23 01:47:28 2014
@@ -34,10 +34,16 @@ public class KMSRESTConstants {
public static final String KEY_VERSION_RESOURCE = "keyversion";
public static final String METADATA_SUB_RESOURCE = "_metadata";
public static final String VERSIONS_SUB_RESOURCE = "_versions";
+ public static final String EEK_SUB_RESOURCE = "_eek";
public static final String CURRENT_VERSION_SUB_RESOURCE = "_currentversion";
public static final String KEY_OP = "key";
+ public static final String EEK_OP = "eek_op";
+ public static final String EEK_GENERATE = "generate";
+ public static final String EEK_DECRYPT = "decrypt";
+ public static final String EEK_NUM_KEYS = "num_keys";
+ public static final String IV_FIELD = "iv";
public static final String NAME_FIELD = "name";
public static final String CIPHER_FIELD = "cipher";
public static final String LENGTH_FIELD = "length";
@@ -47,6 +53,8 @@ public class KMSRESTConstants {
public static final String VERSIONS_FIELD = "versions";
public static final String MATERIAL_FIELD = "material";
public static final String VERSION_NAME_FIELD = "versionName";
+ public static final String ENCRYPTED_KEY_VERSION_FIELD =
+ "encryptedKeyVersion";
public static final String ERROR_EXCEPTION_JSON = "exception";
public static final String ERROR_MESSAGE_JSON = "message";
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Wed Jul 23 01:47:28 2014
@@ -250,6 +250,12 @@ public class CommonConfigurationKeysPubl
public static final long HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT =
300;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS =
+ "hadoop.security.groups.negative-cache.secs";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final long HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS_DEFAULT =
+ 30;
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS =
"hadoop.security.groups.cache.warn.after.ms";
public static final long HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS_DEFAULT =
@@ -285,5 +291,32 @@ public class CommonConfigurationKeysPubl
/** Class to override Impersonation provider */
public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
"hadoop.security.impersonation.provider.class";
+
+ // <!--- KMSClientProvider configurations —>
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String KMS_CLIENT_ENC_KEY_CACHE_SIZE =
+ "hadoop.security.kms.client.encrypted.key.cache.size";
+ /** Default value for KMS_CLIENT_ENC_KEY_CACHE_SIZE */
+ public static final int KMS_CLIENT_ENC_KEY_CACHE_SIZE_DEFAULT = 500;
+
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK =
+ "hadoop.security.kms.client.encrypted.key.cache.low-watermark";
+ /** Default value for KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK */
+ public static final float KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK_DEFAULT =
+ 0.3f;
+
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS =
+ "hadoop.security.kms.client.encrypted.key.cache.num.refill.threads";
+ /** Default value for KMS_CLIENT_ENC_KEY_NUM_REFILL_THREADS */
+ public static final int KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS_DEFAULT =
+ 2;
+
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_MS =
+ "hadoop.security.kms.client.encrypted.key.cache.expiry";
+ /** Default value for KMS_CLIENT_ENC_KEY_CACHE_EXPIRY (12 hrs)*/
+ public static final int KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_DEFAULT = 43200000;
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java Wed Jul 23 01:47:28 2014
@@ -128,6 +128,8 @@ public class Stat extends Shell {
" link " + original);
}
// 6,symbolic link,6,1373584236,1373584236,lrwxrwxrwx,andrew,andrew,`link' -> `target'
+ // OR
+ // 6,symbolic link,6,1373584236,1373584236,lrwxrwxrwx,andrew,andrew,'link' -> 'target'
StringTokenizer tokens = new StringTokenizer(line, ",");
try {
long length = Long.parseLong(tokens.nextToken());
@@ -147,18 +149,17 @@ public class Stat extends Shell {
String group = tokens.nextToken();
String symStr = tokens.nextToken();
// 'notalink'
- // 'link' -> `target'
+ // `link' -> `target' OR 'link' -> 'target'
// '' -> ''
Path symlink = null;
- StringTokenizer symTokens = new StringTokenizer(symStr, "`");
- symTokens.nextToken();
+ String parts[] = symStr.split(" -> ");
try {
- String target = symTokens.nextToken();
- target = target.substring(0, target.length()-1);
+ String target = parts[1];
+ target = target.substring(1, target.length()-1);
if (!target.isEmpty()) {
symlink = new Path(target);
}
- } catch (NoSuchElementException e) {
+ } catch (ArrayIndexOutOfBoundsException e) {
// null if not a symlink
}
// Set stat
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Wed Jul 23 01:47:28 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.fs.FSInputStrea
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.s3.S3Exception;
@@ -225,6 +226,7 @@ public class NativeS3FileSystem extends
private OutputStream backupStream;
private MessageDigest digest;
private boolean closed;
+ private LocalDirAllocator lDirAlloc;
public NativeS3FsOutputStream(Configuration conf,
NativeFileSystemStore store, String key, Progressable progress,
@@ -246,11 +248,10 @@ public class NativeS3FileSystem extends
}
private File newBackupFile() throws IOException {
- File dir = new File(conf.get("fs.s3.buffer.dir"));
- if (!dir.mkdirs() && !dir.exists()) {
- throw new IOException("Cannot create S3 buffer directory: " + dir);
+ if (lDirAlloc == null) {
+ lDirAlloc = new LocalDirAllocator("fs.s3.buffer.dir");
}
- File result = File.createTempFile("output-", ".tmp", dir);
+ File result = lDirAlloc.createTmpFileForWrite("output-", LocalDirAllocator.SIZE_UNKNOWN, conf);
result.deleteOnExit();
return result;
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java Wed Jul 23 01:47:28 2014
@@ -37,6 +37,8 @@ import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
@@ -280,6 +282,38 @@ class ChRootedFs extends AbstractFileSys
}
@Override
+ public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ myFs.modifyAclEntries(fullPath(path), aclSpec);
+ }
+
+ @Override
+ public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ myFs.removeAclEntries(fullPath(path), aclSpec);
+ }
+
+ @Override
+ public void removeDefaultAcl(Path path) throws IOException {
+ myFs.removeDefaultAcl(fullPath(path));
+ }
+
+ @Override
+ public void removeAcl(Path path) throws IOException {
+ myFs.removeAcl(fullPath(path));
+ }
+
+ @Override
+ public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
+ myFs.setAcl(fullPath(path), aclSpec);
+ }
+
+ @Override
+ public AclStatus getAclStatus(Path path) throws IOException {
+ return myFs.getAclStatus(fullPath(path));
+ }
+
+ @Override
public void setVerifyChecksum(final boolean verifyChecksum)
throws IOException, UnresolvedLinkException {
myFs.setVerifyChecksum(verifyChecksum);
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Wed Jul 23 01:47:28 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.fs.UnsupportedF
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.viewfs.InodeTree.INode;
import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
@@ -871,5 +872,46 @@ public class ViewFileSystem extends File
public short getDefaultReplication(Path f) {
throw new NotInMountpointException(f, "getDefaultReplication");
}
+
+ @Override
+ public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("modifyAclEntries", path);
+ }
+
+ @Override
+ public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeAclEntries", path);
+ }
+
+ @Override
+ public void removeDefaultAcl(Path path) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeDefaultAcl", path);
+ }
+
+ @Override
+ public void removeAcl(Path path) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeAcl", path);
+ }
+
+ @Override
+ public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("setAcl", path);
+ }
+
+ @Override
+ public AclStatus getAclStatus(Path path) throws IOException {
+ checkPathIsSlash(path);
+ return new AclStatus.Builder().owner(ugi.getUserName())
+ .group(ugi.getGroupNames()[0])
+ .addEntries(AclUtil.getMinimalAcl(PERMISSION_555))
+ .stickyBit(false).build();
+ }
}
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Wed Jul 23 01:47:28 2014
@@ -49,6 +49,9 @@ import org.apache.hadoop.fs.RemoteIterat
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.fs.local.LocalConfigKeys;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclUtil;
+import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.viewfs.InodeTree.INode;
import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
@@ -603,6 +606,51 @@ public class ViewFs extends AbstractFile
return true;
}
+ @Override
+ public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ res.targetFileSystem.modifyAclEntries(res.remainingPath, aclSpec);
+ }
+
+ @Override
+ public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ res.targetFileSystem.removeAclEntries(res.remainingPath, aclSpec);
+ }
+
+ @Override
+ public void removeDefaultAcl(Path path)
+ throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ res.targetFileSystem.removeDefaultAcl(res.remainingPath);
+ }
+
+ @Override
+ public void removeAcl(Path path)
+ throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ res.targetFileSystem.removeAcl(res.remainingPath);
+ }
+
+ @Override
+ public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ res.targetFileSystem.setAcl(res.remainingPath, aclSpec);
+ }
+
+ @Override
+ public AclStatus getAclStatus(Path path) throws IOException {
+ InodeTree.ResolveResult<AbstractFileSystem> res =
+ fsState.resolve(getUriPath(path), true);
+ return res.targetFileSystem.getAclStatus(res.remainingPath);
+ }
/*
@@ -832,5 +880,46 @@ public class ViewFs extends AbstractFile
throws AccessControlException {
throw readOnlyMountTable("setVerifyChecksum", "");
}
+
+ @Override
+ public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("modifyAclEntries", path);
+ }
+
+ @Override
+ public void removeAclEntries(Path path, List<AclEntry> aclSpec)
+ throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeAclEntries", path);
+ }
+
+ @Override
+ public void removeDefaultAcl(Path path) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeDefaultAcl", path);
+ }
+
+ @Override
+ public void removeAcl(Path path) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("removeAcl", path);
+ }
+
+ @Override
+ public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
+ checkPathIsSlash(path);
+ throw readOnlyMountTable("setAcl", path);
+ }
+
+ @Override
+ public AclStatus getAclStatus(Path path) throws IOException {
+ checkPathIsSlash(path);
+ return new AclStatus.Builder().owner(ugi.getUserName())
+ .group(ugi.getGroupNames()[0])
+ .addEntries(AclUtil.getMinimalAcl(PERMISSION_555))
+ .stickyBit(false).build();
+ }
}
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java Wed Jul 23 01:47:28 2014
@@ -292,9 +292,7 @@ public class Text extends BinaryComparab
@Override
public void readFields(DataInput in) throws IOException {
int newLength = WritableUtils.readVInt(in);
- setCapacity(newLength, false);
- in.readFully(bytes, 0, newLength);
- length = newLength;
+ readWithKnownLength(in, newLength);
}
public void readFields(DataInput in, int maxLength) throws IOException {
@@ -306,9 +304,7 @@ public class Text extends BinaryComparab
throw new IOException("tried to deserialize " + newLength +
" bytes of data, but maxLength = " + maxLength);
}
- setCapacity(newLength, false);
- in.readFully(bytes, 0, newLength);
- length = newLength;
+ readWithKnownLength(in, newLength);
}
/** Skips over one Text in the input. */
@@ -317,6 +313,17 @@ public class Text extends BinaryComparab
WritableUtils.skipFully(in, length);
}
+ /**
+ * Read a Text object whose length is already known.
+ * This allows creating Text from a stream which uses a different serialization
+ * format.
+ */
+ public void readWithKnownLength(DataInput in, int len) throws IOException {
+ setCapacity(len, false);
+ in.readFully(bytes, 0, len);
+ length = len;
+ }
+
/** serialize
* write this object to out
* length uses zero-compressed encoding
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java Wed Jul 23 01:47:28 2014
@@ -100,7 +100,8 @@ public class BZip2Codec implements Confi
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
- return createOutputStream(out, createCompressor());
+ return CompressionCodec.Util.
+ createOutputStreamWithCodecPool(this, conf, out);
}
/**
@@ -153,7 +154,8 @@ public class BZip2Codec implements Confi
@Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
- return createInputStream(in, createDecompressor());
+ return CompressionCodec.Util.
+ createInputStreamWithCodecPool(this, conf, in);
}
/**
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java Wed Jul 23 01:47:28 2014
@@ -24,6 +24,7 @@ import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
/**
* This class encapsulates a streaming compression/decompression pair.
@@ -113,4 +114,58 @@ public interface CompressionCodec {
* @return the extension including the '.'
*/
String getDefaultExtension();
+
+ static class Util {
+ /**
+ * Create an output stream with a codec taken from the global CodecPool.
+ *
+ * @param codec The codec to use to create the output stream.
+ * @param conf The configuration to use if we need to create a new codec.
+ * @param out The output stream to wrap.
+ * @return The new output stream
+ * @throws IOException
+ */
+ static CompressionOutputStream createOutputStreamWithCodecPool(
+ CompressionCodec codec, Configuration conf, OutputStream out)
+ throws IOException {
+ Compressor compressor = CodecPool.getCompressor(codec, conf);
+ CompressionOutputStream stream = null;
+ try {
+ stream = codec.createOutputStream(out, compressor);
+ } finally {
+ if (stream == null) {
+ CodecPool.returnCompressor(compressor);
+ } else {
+ stream.setTrackedCompressor(compressor);
+ }
+ }
+ return stream;
+ }
+
+ /**
+ * Create an input stream with a codec taken from the global CodecPool.
+ *
+ * @param codec The codec to use to create the input stream.
+ * @param conf The configuration to use if we need to create a new codec.
+ * @param in The input stream to wrap.
+ * @return The new input stream
+ * @throws IOException
+ */
+ static CompressionInputStream createInputStreamWithCodecPool(
+ CompressionCodec codec, Configuration conf, InputStream in)
+ throws IOException {
+ Decompressor decompressor = CodecPool.getDecompressor(codec);
+ CompressionInputStream stream = null;
+ try {
+ stream = codec.createInputStream(in, decompressor);
+ } finally {
+ if (stream == null) {
+ CodecPool.returnDecompressor(decompressor);
+ } else {
+ stream.setTrackedDecompressor(decompressor);
+ }
+ }
+ return stream;
+ }
+ }
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java Wed Jul 23 01:47:28 2014
@@ -41,6 +41,8 @@ public abstract class CompressionInputSt
protected final InputStream in;
protected long maxAvailableData = 0L;
+ private Decompressor trackedDecompressor;
+
/**
* Create a compression input stream that reads
* the decompressed bytes from the given stream.
@@ -58,6 +60,10 @@ public abstract class CompressionInputSt
@Override
public void close() throws IOException {
in.close();
+ if (trackedDecompressor != null) {
+ CodecPool.returnDecompressor(trackedDecompressor);
+ trackedDecompressor = null;
+ }
}
/**
@@ -112,4 +118,8 @@ public abstract class CompressionInputSt
public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
+
+ void setTrackedDecompressor(Decompressor decompressor) {
+ trackedDecompressor = decompressor;
+ }
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Wed Jul 23 01:47:28 2014
@@ -34,7 +34,13 @@ public abstract class CompressionOutputS
* The output stream to be compressed.
*/
protected final OutputStream out;
-
+
+ /**
+ * If non-null, this is the Compressor object that we should call
+ * CodecPool#returnCompressor on when this stream is closed.
+ */
+ private Compressor trackedCompressor;
+
/**
* Create a compression output stream that writes
* the compressed bytes to the given stream.
@@ -43,11 +49,19 @@ public abstract class CompressionOutputS
protected CompressionOutputStream(OutputStream out) {
this.out = out;
}
-
+
+ void setTrackedCompressor(Compressor compressor) {
+ trackedCompressor = compressor;
+ }
+
@Override
public void close() throws IOException {
finish();
out.close();
+ if (trackedCompressor != null) {
+ CodecPool.returnCompressor(trackedCompressor);
+ trackedCompressor = null;
+ }
}
@Override
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java Wed Jul 23 01:47:28 2014
@@ -51,14 +51,8 @@ public class DefaultCodec implements Con
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
- // This may leak memory if called in a loop. The createCompressor() call
- // may cause allocation of an untracked direct-backed buffer if native
- // libs are being used (even if you close the stream). A Compressor
- // object should be reused between successive calls.
- LOG.warn("DefaultCodec.createOutputStream() may leak memory. "
- + "Create a compressor first.");
- return new CompressorStream(out, createCompressor(),
- conf.getInt("io.file.buffer.size", 4*1024));
+ return CompressionCodec.Util.
+ createOutputStreamWithCodecPool(this, conf, out);
}
@Override
@@ -82,8 +76,8 @@ public class DefaultCodec implements Con
@Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
- return new DecompressorStream(in, createDecompressor(),
- conf.getInt("io.file.buffer.size", 4*1024));
+ return CompressionCodec.Util.
+ createInputStreamWithCodecPool(this, conf, in);
}
@Override
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java Wed Jul 23 01:47:28 2014
@@ -159,10 +159,11 @@ public class GzipCodec extends DefaultCo
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
- return (ZlibFactory.isNativeZlibLoaded(conf)) ?
- new CompressorStream(out, createCompressor(),
- conf.getInt("io.file.buffer.size", 4*1024)) :
- new GzipOutputStream(out);
+ if (!ZlibFactory.isNativeZlibLoaded(conf)) {
+ return new GzipOutputStream(out);
+ }
+ return CompressionCodec.Util.
+ createOutputStreamWithCodecPool(this, conf, out);
}
@Override
@@ -192,8 +193,9 @@ public class GzipCodec extends DefaultCo
@Override
public CompressionInputStream createInputStream(InputStream in)
- throws IOException {
- return createInputStream(in, null);
+ throws IOException {
+ return CompressionCodec.Util.
+ createInputStreamWithCodecPool(this, conf, in);
}
@Override
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java Wed Jul 23 01:47:28 2014
@@ -84,7 +84,8 @@ public class Lz4Codec implements Configu
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
- return createOutputStream(out, createCompressor());
+ return CompressionCodec.Util.
+ createOutputStreamWithCodecPool(this, conf, out);
}
/**
@@ -157,7 +158,8 @@ public class Lz4Codec implements Configu
@Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
- return createInputStream(in, createDecompressor());
+ return CompressionCodec.Util.
+ createInputStreamWithCodecPool(this, conf, in);
}
/**
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java Wed Jul 23 01:47:28 2014
@@ -95,7 +95,8 @@ public class SnappyCodec implements Conf
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
- return createOutputStream(out, createCompressor());
+ return CompressionCodec.Util.
+ createOutputStreamWithCodecPool(this, conf, out);
}
/**
@@ -158,7 +159,8 @@ public class SnappyCodec implements Conf
@Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
- return createInputStream(in, createDecompressor());
+ return CompressionCodec.Util.
+ createInputStreamWithCodecPool(this, conf, in);
}
/**
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Wed Jul 23 01:47:28 2014
@@ -599,24 +599,35 @@ public class ProtobufRpcEngine implement
.mergeFrom(request.theRequestRead).build();
Message result;
+ long startTime = Time.now();
+ int qTime = (int) (startTime - receiveTime);
+ Exception exception = null;
try {
- long startTime = Time.now();
server.rpcDetailedMetrics.init(protocolImpl.protocolClass);
result = service.callBlockingMethod(methodDescriptor, null, param);
+ } catch (ServiceException e) {
+ exception = (Exception) e.getCause();
+ throw (Exception) e.getCause();
+ } catch (Exception e) {
+ exception = e;
+ throw e;
+ } finally {
int processingTime = (int) (Time.now() - startTime);
- int qTime = (int) (startTime - receiveTime);
if (LOG.isDebugEnabled()) {
- LOG.info("Served: " + methodName + " queueTime= " + qTime +
- " procesingTime= " + processingTime);
+ String msg = "Served: " + methodName + " queueTime= " + qTime +
+ " processingTime= " + processingTime;
+ if (exception != null) {
+ msg += " exception= " + exception.getClass().getSimpleName();
+ }
+ LOG.debug(msg);
}
+ String detailedMetricsName = (exception == null) ?
+ methodName :
+ exception.getClass().getSimpleName();
server.rpcMetrics.addRpcQueueTime(qTime);
server.rpcMetrics.addRpcProcessingTime(processingTime);
- server.rpcDetailedMetrics.addProcessingTime(methodName,
+ server.rpcDetailedMetrics.addProcessingTime(detailedMetricsName,
processingTime);
- } catch (ServiceException e) {
- throw (Exception) e.getCause();
- } catch (Exception e) {
- throw e;
}
return new RpcResponseWrapper(result);
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Wed Jul 23 01:47:28 2014
@@ -355,8 +355,8 @@ public abstract class Server {
private int readThreads; // number of read threads
private int readerPendingConnectionQueue; // number of connections to queue per read thread
private Class<? extends Writable> rpcRequestClass; // class used for deserializing the rpc request
- protected RpcMetrics rpcMetrics;
- protected RpcDetailedMetrics rpcDetailedMetrics;
+ final protected RpcMetrics rpcMetrics;
+ final protected RpcDetailedMetrics rpcDetailedMetrics;
private Configuration conf;
private String portRangeConfig = null;
@@ -2494,12 +2494,8 @@ public abstract class Server {
listener.doStop();
responder.interrupt();
notifyAll();
- if (this.rpcMetrics != null) {
- this.rpcMetrics.shutdown();
- }
- if (this.rpcDetailedMetrics != null) {
- this.rpcDetailedMetrics.shutdown();
- }
+ this.rpcMetrics.shutdown();
+ this.rpcDetailedMetrics.shutdown();
}
/** Wait for the server to be stopped.