Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2014/11/09 14:24:43 UTC

[1/2] hadoop git commit: HADOOP-10563. Fix bad merge which messed up hadoop-common/CHANGES.txt.

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 98b81c662 -> 1f5bad04c


HADOOP-10563. Fix bad merge which messed up hadoop-common/CHANGES.txt.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/242404b0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/242404b0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/242404b0

Branch: refs/heads/branch-2
Commit: 242404b025dc5bac801b6aa318df7b8ab70f451c
Parents: 98b81c6
Author: Arun C. Murthy <ac...@apache.org>
Authored: Sun Nov 9 05:23:05 2014 -0800
Committer: Arun C. Murthy <ac...@apache.org>
Committed: Sun Nov 9 05:23:05 2014 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt | 187 +++++++++++++++++++
 1 file changed, 187 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/242404b0/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 3e8720a..fa7f915 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -194,6 +194,193 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10534. KeyProvider getKeysMetadata should take a list of names
     rather than returning all keys. (omalley)
 
+    HADOOP-10719. Add generateEncryptedKey and decryptEncryptedKey
+    methods to KeyProvider. (asuresh via tucu)
+
+    HADOOP-10817. ProxyUsers configuration should support configurable
+    prefixes. (tucu)
+
+    HADOOP-10881. Clarify usage of encryption and encrypted encryption
+    key in KeyProviderCryptoExtension. (wang)
+
+    HADOOP-10770. KMS add delegation token support. (tucu)
+
+    HADOOP-10698. KMS, add proxyuser support. (tucu)
+
+    HADOOP-8896. Javadoc points to Wrong Reader and Writer classes 
+    in SequenceFile (Ray Chiang via aw)
+
+    HADOOP-10998. Fix bash tab completion code to work (Jim Hester via aw)
+
+    HADOOP-10880. Move HTTP delegation tokens out of URL querystring to 
+    a header. (tucu)
+
+    HADOOP-11005. Fix HTTP content type for ReconfigurationServlet.
+    (Lei Xu via wang)
+
+    HADOOP-10814. Update Tomcat version used by HttpFS and KMS to latest
+    6.x version. (rkanter via tucu)
+
+    HADOOP-10994. KeyProviderCryptoExtension should use CryptoCodec for 
+    generation/decryption of keys. (tucu)
+
+    HADOOP-11021. Configurable replication factor in the hadoop archive
+    command. (Zhe Zhang via wang)
+
+    HADOOP-11030. Define a variable jackson.version instead of using constant 
+    at multiple places. (Juan Yu via kasha)
+
+    HADOOP-10990. Add missing NFSv3 request and response classes (brandonli)
+
+    HADOOP-10863. KMS should have a blacklist for decrypting EEKs. 
+    (asuresh via tucu)
+
+    HADOOP-11054. Add a KeyProvider instantiation based on a URI. (tucu)
+
+    HADOOP-11015. Http server/client utils to propagate and recreate 
+    Exceptions from server to client. (tucu)
+
+    HADOOP-11060. Create a CryptoCodec test that verifies interoperability 
+    between the JCE and OpenSSL implementations. (hitliuyi via tucu)
+
+    HADOOP-11070. Create MiniKMS for testing. (tucu)
+
+    HADOOP-11057. checknative command to probe for winutils.exe on windows.
+    (Xiaoyu Yao via cnauroth)
+
+    HADOOP-10758. KMS: add ACLs on per key basis. (tucu)
+
+    HADOOP-9540. Expose the InMemoryS3 and S3N FilesystemStores implementations
+    for unit testing. (Steve Loughran)
+
+    HADOOP-10373. Create tools/hadoop-amazon for AWS/EMR support. (stevel)
+
+    HADOOP-11074. Move s3-related FS connector code to hadoop-aws (David S.
+    Wang via Colin Patrick McCabe)
+
+    HADOOP-11091. Eliminate old configuration parameter names from s3a (David
+    S. Wang via Colin Patrick McCabe)
+
+    HADOOP-10868. AuthenticationFilter should support externalizing the 
+    secret for signing and provide rotation support. (rkanter via tucu)
+
+    HADOOP-10922. User documentation for CredentialShell. (Larry McCay via wang)
+
+    HADOOP-11016. KMS should support signing cookies with zookeeper secret
+    manager. (tucu)
+
+    HADOOP-11106. Document considerations of HAR and Encryption. (clamb via wang)
+
+    HADOOP-10970. Cleanup KMS configuration keys. (wang)
+
+    HADOOP-11017. KMS delegation token secret manager should be able to use 
+    zookeeper as store. (asuresh via tucu)
+
+    HADOOP-11009. Add Timestamp Preservation to DistCp (Gary Steelman via aw)
+
+    HADOOP-11101. Move the InputStream close statement from the catch block to
+    the finally block in FileContext#copy(). (skrho via vinayakumarb)
+
+    HADOOP-8808. Update FsShell documentation to mention deprecation of some 
+    of the commands, and mention alternatives (Akira AJISAKA via aw)
+
+    HADOOP-10954. Adding site documents of hadoop-tools (Masatake Iwasaki 
+    via aw)
+
+    HADOOP-10731. Remove @date JavaDoc comment in ProgramDriver class (Henry 
+    Saputra via aw)
+
+    HADOOP-11153. Make number of KMS threads configurable. (wang)
+
+    HADOOP-11007. Reinstate building of ant tasks support. (jlowe via kihwal)
+
+    HADOOP-11178. Fix findbugs exclude file. (Arun Suresh via wang)
+
+    HADOOP-11174. Delegation token for KMS should only be got once if it
+    already exists. (Yi Liu via wang)
+
+    HADOOP-11184. Update Hadoop's lz4 to version r123. (cmccabe)
+
+    HADOOP-11181. Generalized o.a.h.s.t.d.DelegationTokenManager to handle all
+    sub-classes of AbstractDelegationTokenIdentifier. (zjshen)
+
+    HADOOP-11207. Enhanced common DelegationTokenAuthenticationHandler to support
+    proxy-users on Delegation-token management operations. (Zhijie Shen via
+    vinodkv)
+
+    HADOOP-11216. Improve Openssl library finding. (cmccabe via yliu)
+
+    HADOOP-11254. Changed visibility of AccessControlList to be public for
+    consumption by ecosystem. (Zhijie Shen via vinodkv)
+
+  OPTIMIZATIONS
+
+    HADOOP-10838. Byte array native checksumming. (James Thomas via todd)
+
+    HADOOP-10696. Add optional attributes to KeyProvider Options and Metadata.
+    (tucu)
+
+    HADOOP-10695. KMSClientProvider should respect a configurable timeout.
+    (yoderme via tucu)
+
+    HADOOP-10757. KeyProvider KeyVersion should provide the key name.
+    (asuresh via tucu)
+
+    HADOOP-10769. Create KeyProvider extension to handle delegation tokens.
+    (Arun Suresh via atm)
+
+    HADOOP-10812. Delegate KeyProviderExtension#toString to underlying
+    KeyProvider. (wang)
+
+    HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
+
+    HADOOP-10824. Refactor KMSACLs to avoid locking. (Benoy Antony via umamahesh)
+
+    HADOOP-10841. EncryptedKeyVersion should have a key name property.
+    (asuresh via tucu)
+
+    HADOOP-10842. CryptoExtension generateEncryptedKey method should
+    receive the key name. (asuresh via tucu)
+
+    HADOOP-10750. KMSKeyProviderCache should be in hadoop-common.
+    (asuresh via tucu)
+
+    HADOOP-10720. KMS: Implement generateEncryptedKey and decryptEncryptedKey
+    in the REST API. (asuresh via tucu)
+
+    HADOOP-10891. Add EncryptedKeyVersion factory method to
+    KeyProviderCryptoExtension. (wang)
+
+    HADOOP-10756. KMS audit log should consolidate successful similar requests.
+    (asuresh via tucu)
+
+    HADOOP-10793. KeyShell args should use single-dash style. (wang)
+
+    HADOOP-10936. Change default KeyProvider bitlength to 128. (wang)
+
+    HADOOP-10224. JavaKeyStoreProvider has to protect against corrupting
+    underlying store. (asuresh via tucu)
+
+    HADOOP-10282. Create a FairCallQueue: a multi-level call queue which
+    schedules incoming calls and multiplexes outgoing calls. (Chris Li via
+    Arpit Agarwal)
+
+    HADOOP-10833. Remove unused cache in UserProvider. (Benoy Antony)
+
+    HADOOP-11112. TestKMSWithZK does not use KEY_PROVIDER_URI. (tucu via wang)
+
+    HADOOP-11111. MiniKDC to use locale EN_US for case conversions. (stevel)
+
+    HADOOP-10681. Remove unnecessary synchronization from Snappy & Zlib
+    codecs. (Gopal Vijayaraghavan via acmurthy)
+
+    HADOOP-11194. Ignore .keep files. (kasha)
+
+    HADOOP-11195. Move Id-Name mapping in NFS to the hadoop-common area for
+    better maintenance (Yongjun Zhang via brandonli)
+
+    HADOOP-11247. Fix a couple javac warnings in NFS. (Brandon Li via wheat9)
+
   BUG FIXES
 
     HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)


[2/2] hadoop git commit: HADOOP-11286. Copied LimitInputStream from guava-0.14 to hadoop to avoid issues with newer versions of guava in applications. Contributed by Christopher Tubbs.

Posted by ac...@apache.org.
HADOOP-11286. Copied LimitInputStream from guava-0.14 to hadoop to avoid issues with newer versions of guava in applications. Contributed by Christopher Tubbs.

(cherry picked from commit 6caa8100d5d2547e34356dc279fd5e65b81a925a)
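
For context, the change amounts to an import swap in the consuming classes (see the hunks below); the copied class keeps Guava's behaviour. A minimal, hypothetical caller sketch, not part of the commit (the class name LimitExample and the byte values are made up for illustration):

    // Hypothetical illustration; not part of this commit.
    //   import com.google.common.io.LimitInputStream;  // old import, removed in Guava 15
    import org.apache.hadoop.util.LimitInputStream;      // Hadoop's copied class

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class LimitExample {
      public static void main(String[] args) throws IOException {
        InputStream raw = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5});
        // Cap reads at 3 bytes no matter how much the underlying stream holds.
        InputStream limited = new LimitInputStream(raw, 3);
        int count = 0;
        while (limited.read() != -1) {
          count++;
        }
        System.out.println(count);  // prints 3
      }
    }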


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1f5bad04
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1f5bad04
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1f5bad04

Branch: refs/heads/branch-2
Commit: 1f5bad04ce16cfe5cffe1ef171c13121e7734f5f
Parents: 242404b
Author: Arun C. Murthy <ac...@apache.org>
Authored: Sat Nov 8 15:39:56 2014 -0800
Committer: Arun C. Murthy <ac...@apache.org>
Committed: Sun Nov 9 05:23:10 2014 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   4 +
 .../apache/hadoop/util/LimitInputStream.java    | 109 +++++++++++++++++++
 .../server/namenode/FSImageFormatProtobuf.java  |   2 +-
 .../tools/offlineImageViewer/FSImageLoader.java |   2 +-
 .../FileDistributionCalculator.java             |   2 +-
 .../offlineImageViewer/PBImageXmlWriter.java    |   2 +-
 .../apache/hadoop/mapreduce/CryptoUtils.java    |   3 +-
 7 files changed, 118 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index fa7f915..acd6b60 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -313,6 +313,10 @@ Release 2.6.0 - UNRELEASED
     HADOOP-11254. Changed visibility of AccessControlList to be public for
     consumption by ecosystem. (Zhijie Shen via vinodkv)
 
+    HADOOP-11286. Copied LimitInputStream from guava-0.14 to hadoop to avoid
+    issues with newer versions of guava in applications. (Christopher Tubbs
+    via acmurthy)
+
   OPTIMIZATIONS
 
     HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java
new file mode 100644
index 0000000..c94a517
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java
@@ -0,0 +1,109 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * Copied from guava source code v15 (LimitedInputStream)
+ * Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
+ * allows Hadoop to remain compatible with Guava versions 11 through 15+.
+ * 
+ * Originally: org.apache.hadoop.hbase.io.LimitInputStream
+ */
+@Unstable
+public final class LimitInputStream extends FilterInputStream {
+  private long left;
+  private long mark = -1;
+
+  public LimitInputStream(InputStream in, long limit) {
+    super(in);
+    checkNotNull(in);
+    checkArgument(limit >= 0, "limit must be non-negative");
+    left = limit;
+  }
+
+  @Override
+  public int available() throws IOException {
+    return (int) Math.min(in.available(), left);
+  }
+
+  // it's okay to mark even if mark isn't supported, as reset won't work
+  @Override
+  public synchronized void mark(int readLimit) {
+    in.mark(readLimit);
+    mark = left;
+  }
+
+  @Override
+  public int read() throws IOException {
+    if (left == 0) {
+      return -1;
+    }
+
+    int result = in.read();
+    if (result != -1) {
+      --left;
+    }
+    return result;
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    if (left == 0) {
+      return -1;
+    }
+
+    len = (int) Math.min(len, left);
+    int result = in.read(b, off, len);
+    if (result != -1) {
+      left -= result;
+    }
+    return result;
+  }
+
+  @Override
+  public synchronized void reset() throws IOException {
+    if (!in.markSupported()) {
+      throw new IOException("Mark not supported");
+    }
+    if (mark == -1) {
+      throw new IOException("Mark not set");
+    }
+
+    in.reset();
+    left = mark;
+  }
+
+  @Override
+  public long skip(long n) throws IOException {
+    n = Math.min(n, left);
+    long skipped = in.skip(n);
+    left -= skipped;
+    return skipped;
+  }
+}
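
The copied class also preserves Guava's mark/reset contract: mark() records the number of bytes still allowed, and reset() restores that budget along with the position of the wrapped stream (throwing IOException if the wrapped stream does not support marking or no mark was set). A hypothetical sketch, not part of the commit, exercising that behaviour:

    // Hypothetical illustration; not part of this commit.
    import org.apache.hadoop.util.LimitInputStream;

    import java.io.BufferedInputStream;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;

    public class MarkResetExample {
      public static void main(String[] args) throws IOException {
        // BufferedInputStream supports mark/reset, so reset() below is legal.
        LimitInputStream in = new LimitInputStream(
            new BufferedInputStream(new ByteArrayInputStream("abcdef".getBytes())), 4);
        in.mark(4);   // remember that 4 bytes are still allowed at this point
        in.read();
        in.read();    // two bytes consumed, two remaining
        in.reset();   // wrapped stream rewound, remaining budget restored to 4
        byte[] buf = new byte[16];
        System.out.println(in.read(buf, 0, buf.length));  // prints 4, the limit
      }
    }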

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
index e0d5a5f..4387cff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
@@ -62,11 +62,11 @@ import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressorStream;
+import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.io.LimitInputStream;
 import com.google.protobuf.CodedOutputStream;
 
 /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
index b68d842..ff665e7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
@@ -47,12 +47,12 @@ import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.LimitInputStream;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.io.LimitInputStream;
 
 /**
  * FSImageLoader loads fsimage and provide methods to return JSON formatted

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
index c8033dd..61c3650 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
@@ -31,9 +31,9 @@ import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.LimitInputStream;
 
 import com.google.common.base.Preconditions;
-import com.google.common.io.LimitInputStream;
 
 /**
  * This is the tool for analyzing file sizes in the namespace image. In order to

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index fa8c59d..3e3f021 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -51,9 +51,9 @@ import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection;
 import org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection;
 import org.apache.hadoop.hdfs.util.XMLUtils;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.LimitInputStream;
 
 import com.google.common.collect.Lists;
-import com.google.common.io.LimitInputStream;
 
 /**
  * PBImageXmlWriter walks over an fsimage structure and writes out

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f5bad04/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
index 7d8a496..184cdf0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
@@ -37,8 +37,7 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.UserGroupInformation;
-
-import com.google.common.io.LimitInputStream;
+import org.apache.hadoop.util.LimitInputStream;
 
 /**
  * This class provides utilities to make it easier to work with Cryptographic