Posted to commits@trafodion.apache.org by db...@apache.org on 2016/03/09 18:04:56 UTC

[1/4] incubator-trafodion git commit: [TRAFODION-1880] Do not build libhdfs dependency from source

Repository: incubator-trafodion
Updated Branches:
  refs/heads/master e82ed976a -> a1bc4654a


[TRAFODION-1880] Do not build libhdfs dependency from source

Make sure the various pom.xml files with a hadoop dependency all pull in
the same version.
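
As a quick sanity check (illustrative, not part of the change itself),
the affected poms can be scanned for the shared property:

  grep -n '<hadoop.version>' core/rest/pom.xml \
      core/sqf/hbase_utilities/pom.xml dcs/pom.xml wms/pom.xml
  # each should now print: <hadoop.version>${env.HADOOP_DEP_VER}</hadoop.version>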

Make HDFS dependencies a build-time pre-req. As with other build
dependencies on shared libraries, make them part of the build environment
(TOOLSDIR).
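
Assuming install/traf_tools_setup.sh has been run, the staged files land
under TOOLSDIR as follows (layout per the script changes below):

  $TOOLSDIR/hadoop-2.6.0/include/hdfs.h
  $TOOLSDIR/hadoop-2.6.0/lib/native/libhadoop*.so*
  $TOOLSDIR/hadoop-2.6.0/lib/native/libhdfs*.so*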

Prior to this change, we were picking up these dependencies (2 shared
libraries & 1 header) from the environment (such as local_hadoop) or
downloading the source and building it on the fly. Building it made the
Trafodion build take more than twice as long.

We now require a consistent build-time dependency, separate from the
runtime environment.

The 64-bit native libraries are now available from the binary distro of
hadoop-common, so we can download the binaries and extract the files we
need rather than build them. Dependencies can be staged via
install/traf_tools_setup.sh, but in case the environment is not updated,
the build-time script get_libhdfs_files is also updated to download the
distro. If you really want to build hadoop-common, that portion of the
script was retained under a new --source option.
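
Typical invocations of the updated script (a sketch; flags as defined in
the usage text below):

  # default: download the hadoop-common binary distro, extract the 3 files
  get_libhdfs_files --verbose

  # opt back in to building libhdfs from source
  get_libhdfs_files --source --tempDir /tmp/libhdfs_files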


Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/3853382a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/3853382a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/3853382a

Branch: refs/heads/master
Commit: 3853382a5d5386f1b048f2c1bdb4eea0440b4a16
Parents: 57d3ebd
Author: Steve Varnau <sv...@apache.org>
Authored: Tue Mar 8 23:26:48 2016 +0000
Committer: Steve Varnau <sv...@apache.org>
Committed: Tue Mar 8 23:26:48 2016 +0000

----------------------------------------------------------------------
 core/rest/pom.xml                      |   2 +-
 core/sqf/hbase_utilities/pom.xml       |   2 +-
 core/sqf/sqenvcom.sh                   |   5 +
 core/sqf/sql/scripts/get_libhdfs_files | 201 +++++++++++++++++-----------
 core/sql/nskgmake/Makerules.linux      |   8 +-
 core/sql/regress/tools/dll-compile.ksh |   4 +-
 dcs/pom.xml                            |   2 +-
 install/traf_tools_setup.sh            |  56 +++++---
 wms/pom.xml                            |   2 +-
 9 files changed, 177 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/rest/pom.xml
----------------------------------------------------------------------
diff --git a/core/rest/pom.xml b/core/rest/pom.xml
index 540660a..be74105 100644
--- a/core/rest/pom.xml
+++ b/core/rest/pom.xml
@@ -417,7 +417,7 @@
   	<compileSource>1.6</compileSource>
     
   	<!-- Dependencies -->
-    <hadoop.version>2.4.0</hadoop.version>
+    <hadoop.version>${env.HADOOP_DEP_VER}</hadoop.version>
   	<commons-cli.version>1.2</commons-cli.version>
   	<commons-codec.version>1.4</commons-codec.version>
   	<commons-io.version>2.1</commons-io.version>

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/sqf/hbase_utilities/pom.xml
----------------------------------------------------------------------
diff --git a/core/sqf/hbase_utilities/pom.xml b/core/sqf/hbase_utilities/pom.xml
index 4e77b68..2f86f07 100644
--- a/core/sqf/hbase_utilities/pom.xml
+++ b/core/sqf/hbase_utilities/pom.xml
@@ -28,7 +28,7 @@
 
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <hadoop.version>2.5.0</hadoop.version>
+    <hadoop.version>${env.HADOOP_DEP_VER}</hadoop.version>
     <hbase.version>${env.HBASE_DEP_VER}</hbase.version>
     <java.version>1.7</java.version>
   </properties>

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/sqf/sqenvcom.sh
----------------------------------------------------------------------
diff --git a/core/sqf/sqenvcom.sh b/core/sqf/sqenvcom.sh
index c2335d4..7c3ec61 100644
--- a/core/sqf/sqenvcom.sh
+++ b/core/sqf/sqenvcom.sh
@@ -152,6 +152,11 @@ fi
 # set common version to be consistent between shared lib and maven dependencies
 export THRIFT_DEP_VER=0.9.0
 export HIVE_DEP_VER=0.13.1
+export HADOOP_DEP_VER=2.6.0
+
+# staged build-time dependencies
+export HADOOP_BLD_LIB=${TOOLSDIR}/hadoop-${HADOOP_DEP_VER}/lib/native
+export HADOOP_BLD_INC=${TOOLSDIR}/hadoop-${HADOOP_DEP_VER}/include
 
 # check for workstation env
 # want to make sure SQ_VIRTUAL_NODES is set in the shell running sqstart
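
The pom.xml files in this change consume HADOOP_DEP_VER through Maven's
env.* property lookup, so the variable only needs to be exported before
invoking Maven. A minimal sketch (assuming a shell where sqenvcom.sh has
been sourced):

  cd dcs
  mvn help:evaluate -Dexpression=hadoop.version | grep -v '^\['
  # should report 2.6.0 rather than a hard-coded pom value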

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/sqf/sql/scripts/get_libhdfs_files
----------------------------------------------------------------------
diff --git a/core/sqf/sql/scripts/get_libhdfs_files b/core/sqf/sql/scripts/get_libhdfs_files
index abe92d2..ed21e9c 100755
--- a/core/sqf/sql/scripts/get_libhdfs_files
+++ b/core/sqf/sql/scripts/get_libhdfs_files
@@ -23,6 +23,11 @@
 # This script downloads and/or makes the required libhdfs files
 # to be able to build Trafodion, which acts as a libhdfs client.
 #
+# This script is now redundant, given that install/traf_tools_setup.sh
+# puts the libhdfs dependency in TOOLSDIR. It remains as a transitional
+# script until the build environment is updated to include these files
+# as a prerequisite.
+#
 # Basically, what we need are three files:
 #
 # hdfs.h       (copied to $TGT_INC_DIR)
@@ -37,10 +42,11 @@ fi
 LOGFILE=${LIBHDFS_TEMP_DIR}/build.log
 
 # Hadoop source tar file to build libhdfs from
-HADOOP_SRC_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0
+HADOOP_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0
 HADOOP_ID=hadoop-2.6.0
 HADOOP_SRC_ID=${HADOOP_ID}-src
 HADOOP_SRC_TAR=${HADOOP_SRC_ID}.tar.gz
+HADOOP_BIN_TAR=${HADOOP_ID}.tar.gz
 
 # files to build required version of Google Protocol Buffers
 PROTOBUF_MIRROR_URL=https://github.com/google/protobuf/releases/download/v2.5.0
@@ -56,11 +62,13 @@ TGT_INC_DIR=$MY_SQROOT/export/include
 TGT_LIB_DIR=$MY_SQROOT/export/lib${SQ_MBTYPE}
 
 FORCE_BUILD=false
+SOURCE_BUILD=false
 VERBOSE=false
 
 usage() {
   echo "Usage: $0"
   echo "    [ -f | --force ]"
+  echo "    [ -s | --source ]"
   echo "    [ -v | --verbose ]"
   echo "    [ -d <temp dir> | --tempDir <temp dir> ]"
 }
@@ -74,6 +82,10 @@ do
        FORCE_BUILD=true
        ;;
 
+    -s|--source)
+       SOURCE_BUILD=true
+       ;;
+
     -v|--verbose)
        VERBOSE=true
        ;;
@@ -99,8 +111,9 @@ do
 done
 
 
-if [[ $FORCE_BUILD == true || \
-      ! -e ${TGT_INC_DIR}/hdfs.h || \
+if [[ $FORCE_BUILD == true ||
+      ! -e ${TGT_INC_DIR}/hdfs.h ||
+      ! -e ${TGT_LIB_DIR}/libhadoop.so ||
       ! -e ${TGT_LIB_DIR}/libhdfs.so ]]; then
 
   if [[ ! -d $LIBHDFS_TEMP_DIR ]]; then
@@ -110,113 +123,141 @@ if [[ $FORCE_BUILD == true || \
       exit 1
     fi
   fi
-
   cd $LIBHDFS_TEMP_DIR
 
-  PROTOBUF_VER=`protoc --version 2>/dev/null | cut -f 2 -d ' '`
+  if [[ $SOURCE_BUILD != true ]]; then
+    echo "Downloading Hadoop-common binary distro..." | tee -a ${LOGFILE}
+    wget ${HADOOP_MIRROR_URL}/${HADOOP_BIN_TAR} 2>&1 >>${LOGFILE}
+    tar -xzf $HADOOP_BIN_TAR  \
+       $HADOOP_ID/lib/native/libhadoop\*so\* \
+       $HADOOP_ID/lib/native/libhdfs\*so\* \
+       $HADOOP_ID/include/hdfs.h
+
+    cp -f ${HADOOP_ID}/include/hdfs.h ${TGT_INC_DIR}
+    cp -Pf ${HADOOP_ID}/lib/native/libhdfs*.so* ${TGT_LIB_DIR}
+    cp -Pf ${HADOOP_ID}/lib/native/libhadoop*.so* ${TGT_LIB_DIR}
+
+  else
+
+    PROTOBUF_VER=`protoc --version 2>/dev/null | cut -f 2 -d ' '`
+
+    # download and build protoc v2.5.0 if not already in the path
+    if [[ "$PROTOBUF_VER" != "${PROTOBUF_VERSION}" ]]; then
+      if [[ ! -f ${PROTOBUF_TAR} ]]; then
+        echo "Downloading Google Protocol Buffers..." | tee -a ${LOGFILE}
+        wget ${PROTOBUF_MIRROR_URL}/${PROTOBUF_TAR} >${LOGFILE}
+      fi
+
+      if [[ $FORCE_BUILD == true ]]; then
+        rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_ID}
+        rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
+      fi
 
-  # download and build protoc v2.5.0 if not already in the path
-  if [[ "$PROTOBUF_VER" != "${PROTOBUF_VERSION}" ]]; then
-    if [[ ! -f ${PROTOBUF_TAR} ]]; then
-      echo "Downloading Google Protocol Buffers..." | tee -a ${LOGFILE}
-      wget ${PROTOBUF_MIRROR_URL}/${PROTOBUF_TAR} >${LOGFILE}
+      if [[ ! -d ${PROTOBUF_ID} ]]; then
+        echo "Unpacking Google Protocol Buffer tar file..." | tee -a ${LOGFILE}
+        rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
+        tar -xzf ${PROTOBUF_TAR} >>${LOGFILE}
+      fi
+
+      if [[ ! -d $PROTOBUF_TGT_ID ]]; then
+        cd ${PROTOBUF_ID}
+        echo "Building Google Protocol Buffers, this could take a while..." | tee -a ${LOGFILE}
+        if [[ $VERBOSE == true ]]; then
+          ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} 2>&1 | tee -a ${LOGFILE}
+        else
+          ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} 2>&1 >>${LOGFILE}
+        fi
+        if [[ $? != 0 ]]; then
+          echo "Error during configure step, exiting" | tee -a ${LOGFILE}
+          exit 1
+        fi
+        make 2>&1 >>${LOGFILE}
+        if [[ $? != 0 ]]; then
+          echo "Error during make step, exiting" | tee -a ${LOGFILE}
+          exit 1
+        fi
+        # skip the tests
+        # make check 2>&1 >>${LOGFILE}
+        # if [[ $? != 0 ]]; then
+        #   echo "Error during check step, exiting" | tee -a ${LOGFILE}
+        #   exit 1
+        # fi
+        make install 2>&1 >>${LOGFILE}
+        if [[ $? != 0 ]]; then
+          echo "Error during install step, exiting" | tee -a ${LOGFILE}
+          # remove partial results, if any
+          rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
+          exit 1
+        fi
+      fi
+
+      # Tell the Hadoop build to use our custom-built protoc
+      export HADOOP_PROTOC_PATH=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}/bin/protoc
+    fi
+
+    cd $LIBHDFS_TEMP_DIR
+
+    if [[ ! -f ${HADOOP_SRC_TAR} ]]; then
+      echo "Downloading Hadoop tar file ${HADOOP_SRC_TAR}..." | tee -a ${LOGFILE}
+      wget ${HADOOP_MIRROR_URL}/${HADOOP_SRC_TAR} 2>&1 >>${LOGFILE}
     fi
 
     if [[ $FORCE_BUILD == true ]]; then
-      rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_ID}
-      rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
+      rm -rf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}
     fi
 
-    if [[ ! -d ${PROTOBUF_ID} ]]; then
-      echo "Unpacking Google Protocol Buffer tar file..." | tee -a ${LOGFILE}
-      rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
-      tar -xzf ${PROTOBUF_TAR} >>${LOGFILE}
+    if [[ ! -d ${HADOOP_SRC_ID} ]]; then
+      echo "Unpacking Hadoop tar file..." | tee -a ${LOGFILE}
+      tar -xzf ${HADOOP_SRC_TAR}
     fi
 
-    if [[ ! -d $PROTOBUF_TGT_ID ]]; then
-      cd ${PROTOBUF_ID}
-      echo "Building Google Protocol Buffers, this could take a while..." | tee -a ${LOGFILE}
+    if [[ ! -d ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target ]]; then
+      cd ${HADOOP_SRC_ID}
+      echo "Building native library, this will take several minutes..." | tee -a ${LOGFILE}
       if [[ $VERBOSE == true ]]; then
-        ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} 2>&1 | tee -a ${LOGFILE}
+        mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar 2>&1 | tee -a ${LOGFILE}
       else
-        ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} 2>&1 >>${LOGFILE}
-      fi
-      if [[ $? != 0 ]]; then
-        echo "Error during configure step, exiting" | tee -a ${LOGFILE}
-        exit 1
+        mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar 2>&1 >>${LOGFILE}
       fi
-      make 2>&1 >>${LOGFILE}
       if [[ $? != 0 ]]; then
-        echo "Error during make step, exiting" | tee -a ${LOGFILE}
-        exit 1
-      fi
-      # skip the tests
-      # make check 2>&1 >>${LOGFILE}
-      # if [[ $? != 0 ]]; then
-      #   echo "Error during check step, exiting" | tee -a ${LOGFILE}
-      #   exit 1
-      # fi
-      make install 2>&1 >>${LOGFILE}
-      if [[ $? != 0 ]]; then
-        echo "Error during install step, exiting" | tee -a ${LOGFILE}
-        # remove partial results, if any
-        rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
+        echo "Error during Maven build step for libhdfs, exiting" | tee -a ${LOGFILE}
         exit 1
       fi
     fi
 
-    # Tell the Hadoop build to use our custom-built protoc
-    export HADOOP_PROTOC_PATH=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}/bin/protoc
-  fi
-
-  cd $LIBHDFS_TEMP_DIR
-
-  if [[ ! -f ${HADOOP_SRC_TAR} ]]; then
-    echo "Downloading Hadoop tar file ${HADOOP_SRC_TAR}..." | tee -a ${LOGFILE}
-    wget ${HADOOP_SRC_MIRROR_URL}/${HADOOP_SRC_TAR} 2>&1 >>${LOGFILE}
-  fi
-
-  if [[ $FORCE_BUILD == true ]]; then
-    rm -rf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}
-  fi
-
-  if [[ ! -d ${HADOOP_SRC_ID} ]]; then
-    echo "Unpacking Hadoop tar file..." | tee -a ${LOGFILE}
-    tar -xzf ${HADOOP_SRC_TAR}
-  fi
-
-  if [[ ! -d ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target ]]; then
-    cd ${HADOOP_SRC_ID}
-    echo "Building native library, this will take several minutes..." | tee -a ${LOGFILE}
+    echo "Copying include file and built libraries to Trafodion export dir..." | tee -a ${LOGFILE}
     if [[ $VERBOSE == true ]]; then
-      mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar 2>&1 | tee -a ${LOGFILE}
-    else
-      mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar 2>&1 >>${LOGFILE}
-    fi
-    if [[ $? != 0 ]]; then
-      echo "Error during Maven build step for libhdfs, exiting" | tee -a ${LOGFILE}
-      exit 1
+      set -x
     fi
-  fi
-
-  echo "Copying include file and built libraries to Trafodion export dir..." | tee -a ${LOGFILE}
-  if [[ $VERBOSE == true ]]; then
-    set -x
-  fi
-  cp -f ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/include/hdfs.h ${TGT_INC_DIR}
-  cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhdfs*.so* ${TGT_LIB_DIR}
-  cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhadoop*.so* ${TGT_LIB_DIR}
+    cp -f ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/include/hdfs.h ${TGT_INC_DIR}
+    cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhdfs*.so* ${TGT_LIB_DIR}
+    cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhadoop*.so* ${TGT_LIB_DIR}
+  fi # source build
 
   ls -l ${TGT_INC_DIR}/hdfs.h       >> ${LOGFILE}
   ls -l ${TGT_LIB_DIR}/libhdfs.so   >> ${LOGFILE}
   ls -l ${TGT_LIB_DIR}/libhadoop.so >> ${LOGFILE}
 
   # Final check whether all the needed files are there
-  if [[ ! -r ${TGT_INC_DIR}/hdfs.h || \
+  if [[ ! -r ${TGT_INC_DIR}/hdfs.h ||
+        ! -r ${TGT_LIB_DIR}/libhadoop.so ||
         ! -r ${TGT_LIB_DIR}/libhdfs.so ]]; then
     echo "Error, not all files were created" | tee -a ${LOGFILE}
     ls -l ${TGT_INC_DIR}/hdfs.h
     ls -l ${TGT_LIB_DIR}/libhdfs.so
+    ls -l ${TGT_LIB_DIR}/libhadoop.so
     exit 1
   fi
+  # check that we have 64-bit libs
+  libcheck=0
+  file -L ${TGT_LIB_DIR}/libhdfs.so | grep -q 'ELF 64-bit' || libcheck=1
+  file -L ${TGT_LIB_DIR}/libhadoop.so | grep -q 'ELF 64-bit' || libcheck=1
+  if [[ $libcheck == 1 ]]; then
+    echo "Error, libraries are not 'ELF 64-bit'" | tee -a ${LOGFILE}
+    file -L ${TGT_LIB_DIR}/libhdfs.so
+    file -L ${TGT_LIB_DIR}/libhadoop.so
+    exit 1
+  fi
+
 fi
+exit 0
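
One detail worth noting in the new default path: the extracted libraries
are verified to be 64-bit ELF objects. The same spot-check can be run by
hand against the staged results (a sketch, using the script's target
paths):

  file -L $MY_SQROOT/export/lib${SQ_MBTYPE}/libhdfs.so
  # expected: ... ELF 64-bit LSB shared object, x86-64 ...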

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/sql/nskgmake/Makerules.linux
----------------------------------------------------------------------
diff --git a/core/sql/nskgmake/Makerules.linux b/core/sql/nskgmake/Makerules.linux
index a707a5e..d0b0e63 100755
--- a/core/sql/nskgmake/Makerules.linux
+++ b/core/sql/nskgmake/Makerules.linux
@@ -423,10 +423,10 @@ copytoolslibs:
 	cp -Pf $(THRIFT_LIB_DIR)/$(THRIFT_SO)* $(LIBROOT)
 	cp -Pf $(THRIFT_LIB_DIR)/libthrift.so $(LIBROOT)
 	# if these are not found, then...
-	-cp -Pf $(HADOOP_LIB_DIR)/$(LIBHDFS_SO)* $(LIBROOT)
-	-cp -Pf $(HADOOP_LIB_DIR)/$(LIBHADOOP_SO)* $(LIBROOT)
-	-cp -Pf $(HADOOP_INC_DIR)/hdfs.h $(MY_SQROOT)/export/include
-	# download Hadoop source and build the hdfs library
+	-cp -Pf $(HADOOP_BLD_LIB)/$(LIBHDFS_SO)* $(LIBROOT)
+	-cp -Pf $(HADOOP_BLD_LIB)/$(LIBHADOOP_SO)* $(LIBROOT)
+	-cp -Pf $(HADOOP_BLD_INC)/hdfs.h $(MY_SQROOT)/export/include
+	# download Hadoop-common distro
 	get_libhdfs_files --verbose
 
 linuxmklinksdebug linuxmklinksrelease: copytoolslibs
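
The leading '-' on the cp commands tells make to ignore a failing
command, so when the TOOLSDIR files are not staged the recipe still
reaches get_libhdfs_files, which re-checks and downloads the distro
itself. A rough shell equivalent of the fallback (illustration only):

  cp -Pf "$HADOOP_BLD_LIB"/libhdfs*.so* "$LIBROOT" || true  # ignore failure
  get_libhdfs_files --verbose  # no-op if the files are already in place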

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/core/sql/regress/tools/dll-compile.ksh
----------------------------------------------------------------------
diff --git a/core/sql/regress/tools/dll-compile.ksh b/core/sql/regress/tools/dll-compile.ksh
index e4ae68f..cf6a522 100755
--- a/core/sql/regress/tools/dll-compile.ksh
+++ b/core/sql/regress/tools/dll-compile.ksh
@@ -66,12 +66,12 @@ TARGET=
     fi
   CC_OPTS="-g "
   CC_OPTS="$CC_OPTS -I$MY_SQROOT/sql/sqludr"
-  CC_OPTS="$CC_OPTS -I$MY_SQROOT/export/include/sql  -I$MY_SQROOT/export/include/nsk -I${TOOLSDIR}/${HADOOP_DIST}/${HADOOP_INC_DIR} -I${JAVA_HOME}/include -I${JAVA_HOME}/include/linux"
+  CC_OPTS="$CC_OPTS -I$MY_SQROOT/export/include/sql  -I$MY_SQROOT/export/include/nsk -I${JAVA_HOME}/include -I${JAVA_HOME}/include/linux"
   CC_OPTS="$CC_OPTS -w -O0 -Wno-unknown-pragmas -fPIC -fshort-wchar -c -o $BASE.o $1"
   TARGET=$BASE.dll
   LD=$CC
   LD_OPTS=" -w -O0 -Wno-unknown-pragmas -fshort-wchar"
-  LD_OPTS="$LD_OPTS -shared -rdynamic -o $TARGET -lc -lhdfs -ljvm -L$MY_SQROOT/export/lib${SQ_MBTYPE} -ltdm_sqlcli -L${TOOLSDIR}/${HADOOP_DIST}/${HADOOP_LIB_DIR} -L${JAVA_HOME}/jre/lib/amd64/server $2 $BASE.o"
+  LD_OPTS="$LD_OPTS -shared -rdynamic -o $TARGET -lc -lhdfs -ljvm -L$MY_SQROOT/export/lib${SQ_MBTYPE} -ltdm_sqlcli -L${JAVA_HOME}/jre/lib/amd64/server $2 $BASE.o"
 
 LONGLINE=\
 ------------------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/dcs/pom.xml
----------------------------------------------------------------------
diff --git a/dcs/pom.xml b/dcs/pom.xml
index 1fb2938..d8104f3 100644
--- a/dcs/pom.xml
+++ b/dcs/pom.xml
@@ -498,7 +498,7 @@
   	<compileSource>1.6</compileSource>
     
   	<!-- Dependencies -->
-    <hadoop.version>2.6.0</hadoop.version>
+    <hadoop.version>${env.HADOOP_DEP_VER}</hadoop.version>
   	<commons-cli.version>1.2</commons-cli.version>
   	<commons-codec.version>1.4</commons-codec.version>
   	<commons-io.version>2.1</commons-io.version>

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/install/traf_tools_setup.sh
----------------------------------------------------------------------
diff --git a/install/traf_tools_setup.sh b/install/traf_tools_setup.sh
index 84a4825..956cd59 100755
--- a/install/traf_tools_setup.sh
+++ b/install/traf_tools_setup.sh
@@ -45,6 +45,7 @@
 # Thrift: Communications and data serialization tool
 # Maven: Build tool that is only installed if compatible version does not exist
 # log4cxx: standard logging framework for C++
+# hadoop: shared libraries for libhadoop, libhdfs, and hdfs header file
 #
 # Script can be modified to meet the needs of your environment
 # May need root or SUDO access to install tools in desired location
@@ -65,29 +66,26 @@ function Usage {
 
 # -----------------------------------------------------------------------------
 # function: downloadSource - downloads and un-tars the requested file
-#    $1 - tar file to download 
-#    $2 - directory where source is untarred
+#    $1 - tar file URL to download
+#    $2 - directory where source is untarred (leave empty to skip untar)
 #
-# Suggestion:  instead use a single argument $1 and figure out the name of the
-#              file to extract with basename $1
 # -----------------------------------------------------------------------------
 function downloadSource
 {
-  # currently only tar files ending in "tar.gz" and "tgz" are recognized
-  TARSUFFIX="tar.gz"
-  if [[ ! $1 == *$"$TARSUFFIX" ]]; then
-    TARSUFFIX="tgz"
-  fi
+  URL="$1"
+  SRCDIR="$2"
+  TARFILE="${URL##*/}"
 
-  if [ ! -e $BASEDIR/$2.$TARSUFFIX ]; then
-    wget $1  >>$LOGFILE 2>&1
+  if [ ! -e $BASEDIR/$TARFILE ]; then
+    wget $URL  >>$LOGFILE 2>&1
+    echo "INFO:   downloaded tar file: $TARFILE " | tee -a $LOGFILE
   else
-    echo "INFO:   tar file already downloaded, step skipped" | tee -a $LOGFIL
+    echo "INFO:   tar file already downloaded, step skipped" | tee -a $LOGFILE
   fi
 
-  if [ ! -e $BASEDIR/$2 ]; then
-    tar -xzf $BASEDIR/$2.$TARSUFFIX
-    echo "INFO:   downloaded tar file: $2.$TARSUFFIX " | tee -a $LOGFILE
+  if [ ! -e $BASEDIR/$SRCDIR ]; then
+    cd $BASEDIR
+    tar -xzf $BASEDIR/$TARFILE
   else
     echo "INFO:   source tree already exists" | tee -a $LOGFILE
   fi
@@ -156,6 +154,11 @@ if [ "$BASEDIR" == "" ]; then
   Usage;
   exit 1;
 fi
+# handle relative path
+if [[ ! $BASEDIR =~ ^/ ]]
+then
+  BASEDIR=$(pwd)/$BASEDIR
+fi
 
 if [ ! -d "$BASEDIR" ]; then
   echo
@@ -170,6 +173,11 @@ if [ "$TOOLSDIR" == "" ]; then
   Usage;
   exit 1;
 fi
+# handle relative path
+if [[ ! $TOOLSDIR =~ ^/ ]]
+then
+  TOOLSDIR=$(pwd)/$TOOLSDIR
+fi
 
 if [ ! -d "$TOOLSDIR" ]; then                                                    
   read -p "Tools install directory $TOOLSDIR does not exist, do you want to to create it? y/n : " CREATEDIR
@@ -401,6 +409,24 @@ else
 fi
 
 # -----------------------------------------------------------------------------
+# download hadoop/hdfs libs
+echo
+echo "INFO: Hadoop/HDFS libs on $(date)" | tee -a $LOGFILE
+HVER="2.6.0"
+if [ -d $TOOLSDIR/hadoop-${HVER} ]; then
+  echo "INFO: Hadoop/HDFS is already installed, skipping to next tool" | tee -a $LOGFILE
+else
+  downloadSource http://archive.apache.org/dist/hadoop/common/hadoop-${HVER}/hadoop-${HVER}.tar.gz # no un-tar
+  cd $TOOLSDIR
+  tar -xzf $BASEDIR/hadoop-${HVER}.tar.gz \
+    hadoop-${HVER}/lib/native/libhadoop\*so\* \
+    hadoop-${HVER}/lib/native/libhdfs\*so\* \
+    hadoop-${HVER}/include/hdfs.h
+  echo "INFO:   extraction complete" | tee -a $LOGFILE
+fi
+echo " *********************************************************** " | tee -a $LOGFILE
+
+# -----------------------------------------------------------------------------
 
 echo
 echo "INFO: Waiting for all background builds. This might take a while." | tee -a $LOGFILE

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/3853382a/wms/pom.xml
----------------------------------------------------------------------
diff --git a/wms/pom.xml b/wms/pom.xml
index 0080020..c79985c 100644
--- a/wms/pom.xml
+++ b/wms/pom.xml
@@ -381,7 +381,7 @@
   	
   	
   	<!-- Dependencies -->
-  	<hadoop.version>2.6.0</hadoop.version>   
+  	<hadoop.version>${env.HADOOP_DEP_VER}</hadoop.version>   
   	<commons-cli.version>1.2</commons-cli.version>
   	<commons-codec.version>1.4</commons-codec.version>
   	<commons-io.version>2.1</commons-io.version>


[2/4] incubator-trafodion git commit: [TRAFODION-1880] Carry over the common hadoop version number to get_libhdfs_files

Posted by db...@apache.org.
[TRAFODION-1880] Carry over the common hadoop version number to get_libhdfs_files


Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/a63bec90
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/a63bec90
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/a63bec90

Branch: refs/heads/master
Commit: a63bec901e1e765bbbe9549117b6f6e433b32118
Parents: 3853382
Author: Steve Varnau <sv...@apache.org>
Authored: Wed Mar 9 00:10:23 2016 +0000
Committer: Steve Varnau <sv...@apache.org>
Committed: Wed Mar 9 00:10:23 2016 +0000

----------------------------------------------------------------------
 core/sqf/sql/scripts/get_libhdfs_files | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/a63bec90/core/sqf/sql/scripts/get_libhdfs_files
----------------------------------------------------------------------
diff --git a/core/sqf/sql/scripts/get_libhdfs_files b/core/sqf/sql/scripts/get_libhdfs_files
index ed21e9c..056f668 100755
--- a/core/sqf/sql/scripts/get_libhdfs_files
+++ b/core/sqf/sql/scripts/get_libhdfs_files
@@ -43,7 +43,7 @@ LOGFILE=${LIBHDFS_TEMP_DIR}/build.log
 
 # Hadoop source tar file to build libhdfs from
 HADOOP_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0
-HADOOP_ID=hadoop-2.6.0
+HADOOP_ID=hadoop-${HADOOP_DEP_VER}
 HADOOP_SRC_ID=${HADOOP_ID}-src
 HADOOP_SRC_TAR=${HADOOP_SRC_ID}.tar.gz
 HADOOP_BIN_TAR=${HADOOP_ID}.tar.gz


[3/4] incubator-trafodion git commit: [TRAFODION-1880] Remove another hard-coded hadoop version number

Posted by db...@apache.org.
[TRAFODION-1880] Remove another hard-coded hadoop version number


Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/7d1393ff
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/7d1393ff
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/7d1393ff

Branch: refs/heads/master
Commit: 7d1393ff6245ee269673bf10ad1ab7f0c1badc2b
Parents: a63bec9
Author: Steve Varnau <sv...@apache.org>
Authored: Wed Mar 9 01:01:14 2016 +0000
Committer: Steve Varnau <sv...@apache.org>
Committed: Wed Mar 9 01:01:14 2016 +0000

----------------------------------------------------------------------
 core/sqf/sql/scripts/get_libhdfs_files | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/7d1393ff/core/sqf/sql/scripts/get_libhdfs_files
----------------------------------------------------------------------
diff --git a/core/sqf/sql/scripts/get_libhdfs_files b/core/sqf/sql/scripts/get_libhdfs_files
index 056f668..55d7197 100755
--- a/core/sqf/sql/scripts/get_libhdfs_files
+++ b/core/sqf/sql/scripts/get_libhdfs_files
@@ -42,8 +42,8 @@ fi
 LOGFILE=${LIBHDFS_TEMP_DIR}/build.log
 
 # Hadoop source tar file to build libhdfs from
-HADOOP_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0
 HADOOP_ID=hadoop-${HADOOP_DEP_VER}
+HADOOP_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/${HADOOP_ID}
 HADOOP_SRC_ID=${HADOOP_ID}-src
 HADOOP_SRC_TAR=${HADOOP_SRC_ID}.tar.gz
 HADOOP_BIN_TAR=${HADOOP_ID}.tar.gz


[4/4] incubator-trafodion git commit: Merge [TRAFODION-1880] PR 373 Do not build libhdfs dependency from source

Posted by db...@apache.org.
Merge [TRAFODION-1880] PR 373 Do not build libhdfs dependency from source


Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/a1bc4654
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/a1bc4654
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/a1bc4654

Branch: refs/heads/master
Commit: a1bc4654a70ca2550163a26dd025df5e5d164933
Parents: e82ed97 7d1393f
Author: Dave Birdsall <db...@apache.org>
Authored: Wed Mar 9 17:03:42 2016 +0000
Committer: Dave Birdsall <db...@apache.org>
Committed: Wed Mar 9 17:03:42 2016 +0000

----------------------------------------------------------------------
 core/rest/pom.xml                      |   2 +-
 core/sqf/hbase_utilities/pom.xml       |   2 +-
 core/sqf/sqenvcom.sh                   |   5 +
 core/sqf/sql/scripts/get_libhdfs_files | 203 +++++++++++++++++-----------
 core/sql/nskgmake/Makerules.linux      |   8 +-
 core/sql/regress/tools/dll-compile.ksh |   4 +-
 dcs/pom.xml                            |   2 +-
 install/traf_tools_setup.sh            |  56 ++++++--
 wms/pom.xml                            |   2 +-
 9 files changed, 178 insertions(+), 106 deletions(-)
----------------------------------------------------------------------