Posted to common-commits@hadoop.apache.org by zj...@apache.org on 2015/03/13 00:04:47 UTC

[28/49] hadoop git commit: HADOOP-10115. Exclude duplicate jars in hadoop package under different component's lib (Vinayakumar B via aw)

HADOOP-10115. Exclude duplicate jars in hadoop package under different component's lib (Vinayakumar B via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/47f7f18d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/47f7f18d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/47f7f18d

Branch: refs/heads/YARN-2928
Commit: 47f7f18d4cc9145607ef3dfb70aa88748cd9dbec
Parents: 54639c7
Author: Allen Wittenauer <aw...@apache.org>
Authored: Mon Mar 9 21:44:06 2015 -0700
Committer: Allen Wittenauer <aw...@apache.org>
Committed: Mon Mar 9 21:44:06 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 hadoop-dist/pom.xml                             | 89 +++++++++++++++++---
 2 files changed, 78 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
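
In short, the dist assembly now skips any jar that an earlier component has already placed under share/, instead of blindly copying every component's lib directory. A minimal standalone sketch of that duplicate check follows; findFileInDir mirrors the helper added to hadoop-dist/pom.xml below, while copyJarIfAbsent is a simplified stand-in for the real copyIfNotExists, and the jar name and paths in the example are made up for illustration:

    # Count how many files named "$1" already exist under "$2" (default: ./share).
    findFileInDir() {
      local file="$1"
      local dir="${2:-./share}"
      find "$dir" -iname "$file" | wc -l
    }

    # Copy a single jar only when no jar with the same name is already under ./share.
    copyJarIfAbsent() {
      local src="$1"
      local dest="$2"
      local name
      name=$(basename "$src")
      if [ "$(findFileInDir "$name")" -eq 0 ]; then
        mkdir -p "$(dirname "$dest")"
        cp "$src" "$dest"
      fi
    }

    # Hypothetical example: the jar already shipped under ./share wins, so the
    # identical jar offered by a later component is silently skipped.
    # copyJarIfAbsent hadoop-hdfs/lib/guava-11.0.2.jar ./share/hadoop/hdfs/lib/guava-11.0.2.jar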


http://git-wip-us.apache.org/repos/asf/hadoop/blob/47f7f18d/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index fa73ba1..f831d1a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -187,6 +187,9 @@ Trunk (Unreleased)
     HADOOP-11673. Skip using JUnit Assume in TestCodec. (Brahma Reddy Battula
     via cdouglas)
 
+    HADOOP-10115. Exclude duplicate jars in hadoop package under different
+    component's lib (Vinayakumar B via aw)
+
   BUG FIXES
 
     HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/47f7f18d/hadoop-dist/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 0c82332..f894c01 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -107,25 +107,86 @@
                         fi
                       }
 
-                      ROOT=`cd ../..;pwd`
+                      findFileInDir(){
+                        local file="$1";
+                        local dir="${2:-./share}";
+                        local count=$(find "$dir" -iname "$file"|wc -l)
+                        echo "$count";
+                      }
+
+                      copyIfNotExists(){
+                        local src="$1"
+                        local srcName=$(basename "$src")
+                        local dest="$2";
+                        if [ -f "$src" ]; then
+                          if [[ "$srcName" != *.jar ]] || [ $(findFileInDir "$srcName") -eq "0" ]; then
+                            local destDir=$(dirname "$dest")
+                            mkdir -p "$destDir"
+                            cp "$src" "$dest"
+                          fi
+                        else
+                          for childPath in "$src"/* ;
+                          do
+                            child=$(basename "$childPath");
+                            if [ "$child" == "doc" ] || [ "$child" == "webapps" ]; then
+                              mkdir -p "$dest"/"$child"
+                              cp -r "$src"/"$child"/* "$dest"/"$child"
+                              continue;
+                            fi
+                            copyIfNotExists "$src"/"$child" "$dest"/"$child"
+                          done
+                        fi
+                      }
+
+                      # Copy all contents as-is, except for the lib directories.
+                      # For libs, copy a jar only if it is not already present in the share directory.
+                      copy(){
+                        local src="$1";
+                        local dest="$2";
+                        if [ -d "$src" ]; then
+                          for childPath in "$src"/* ;
+                          do
+                            child=$(basename "$childPath");
+                            if [ "$child" == "share" ]; then
+                              copyIfNotExists "$src"/"$child" "$dest"/"$child"
+                            else
+                              if [ -d "$src"/"$child" ]; then
+                                mkdir -p "$dest"/"$child"
+                                cp -r "$src"/"$child"/* "$dest"/"$child"
+                              else
+                                cp -r "$src"/"$child" "$dest"/"$child"
+                              fi
+                            fi
+                          done
+                        fi
+                      }
+
+                      # Shellcheck SC2086
+                      ROOT=$(cd "${project.build.directory}"/../..;pwd)
                       echo
-                      echo "Current directory `pwd`"
+                      echo "Current directory $(pwd)"
                       echo
                       run rm -rf hadoop-${project.version}
                       run mkdir hadoop-${project.version}
                       run cd hadoop-${project.version}
-                      run cp $ROOT/LICENSE.txt .
-                      run cp $ROOT/NOTICE.txt .
-                      run cp $ROOT/README.txt .
-                      run cp -r $ROOT/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version}/* .
-                      run cp -r $ROOT/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version}/* .
-                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
-                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
-                      run cp -r $ROOT/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* .
-                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/* .
-                      run cp -r $ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
-                      run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
-                      run cp -r $ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/* .
+                      run cp "$ROOT"/LICENSE.txt .
+                      run cp "$ROOT"/NOTICE.txt .
+                      run cp "$ROOT"/README.txt .
+
+                      # Copy hadoop-common first so that it always has all of its dependencies.
+                      # The remaining projects copy only the libraries that are not already present in the 'share' directory.
+                      run copy "$ROOT"/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version} .
+                      run copy "$ROOT"/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version} .
+                      run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version} .
+                      run copy "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version} .
+                      run copy "$ROOT"/hadoop-yarn-project/target/hadoop-yarn-project-${project.version} .
+                      run copy "$ROOT"/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version} .
+                      run copy "$ROOT"/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version} .
+
+                      # Copy httpfs and kms as-is.
+                      run cp -r "$ROOT"/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
+                      run cp -r "$ROOT"/hadoop-common-project/hadoop-kms/target/hadoop-kms-${project.version}/* .
+
                       echo
                       echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
                       echo
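
For reference, a rough sketch of the resulting behaviour, assuming the findFileInDir/copyIfNotExists/copy functions from the diff above have been sourced into a shell; the demo directories and jar names are invented, but the call pattern matches the build, which cd's into hadoop-${project.version} before invoking copy:

    # Hypothetical layout: two components that both bundle the same jar.
    mkdir -p demo/common/share/lib demo/hdfs/share/lib demo/dist/share
    echo a > demo/common/share/lib/guava-11.0.2.jar
    echo b > demo/hdfs/share/lib/guava-11.0.2.jar   # same jar name, different component
    echo c > demo/hdfs/share/lib/hdfs-only.jar

    cd demo/dist
    copy ../common .   # first component: guava-11.0.2.jar lands under ./share/lib
    copy ../hdfs .     # duplicate guava is skipped; hdfs-only.jar is still copied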