Posted to commits@hawq.apache.org by wa...@apache.org on 2021/01/22 07:54:05 UTC

[hawq] branch master updated: HAWQ-1780. Add GitHub Action Step to Test against Running Instance

This is an automated email from the ASF dual-hosted git repository.

wangchunling pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hawq.git


The following commit(s) were added to refs/heads/master by this push:
     new 4cbd805  HAWQ-1780. Add GitHub Action Step to Test against Running Instance
4cbd805 is described below

commit 4cbd8057e5f27802c3e3a709bb0f735c71315c6b
Author: Chiyang Wan <ch...@gmail.com>
AuthorDate: Thu Jan 21 10:27:06 2021 +0800

    HAWQ-1780. Add GitHub Action Step to Test against Running Instance
    
    It fixes the install_name of libhdfs3 and libyarn by adding the
    @rpath prefix, which is required when the executable is loaded from
    a different path, and it makes the build search the self-contained
    headers and libraries first.
---
 .github/workflows/build.yml             | 34 +++++++++++++++---
 .github/workflows/scripts/init_hawq.sh  | 64 +++++++++++++++++++++++++++++++++
 .github/workflows/scripts/init_hdfs.sh  | 52 +++++++++++++++++++++++++++
 .github/workflows/scripts/init_macos.sh | 49 +++++++++++++++++++++++++
 depends/libhdfs3/CMakeLists.txt         |  1 +
 depends/libyarn/CMakeLists.txt          |  1 +
 src/Makefile.global.in                  |  8 ++---
 7 files changed, 201 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index ee4e22f..503109c 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -15,7 +15,7 @@
 
 name: Apache HAWQ
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   build-on-macOS:
@@ -44,6 +44,7 @@ jobs:
         install_name_tool -add_rpath $GITHUB_WORKSPACE/dependency-Darwin/package/lib/perl5/5.28.0/darwin-thread-multi-2level/CORE/ $GITHUB_WORKSPACE/dependency-Darwin/package/bin/perl
 
     - name: configure
+      timeout-minutes: 10
       run: |
         source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
         export CFLAGS="$CFLAGS -w"
@@ -55,16 +56,41 @@ jobs:
     - name: build hawq
       run: |
         source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
-        make -j$(nproc)
-        make -j$(nproc) install
+        make -j$(sysctl -n hw.ncpu)
+        make -j$(sysctl -n hw.ncpu) install
 
     - name: build feature-test
       run: |
         source $GITHUB_WORKSPACE/dependency-Darwin/package/env.sh
-        make -j$(nproc) feature-test
+        make -j$(sysctl -n hw.ncpu) feature-test
 
     - name: test executable
       run: |
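+        # Add an rpath entry pointing at /tmp/hawq/lib so the Mach-O
+        # binaries can resolve the @rpath-prefixed libhdfs3/libyarn
+        # dylibs at load time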
+        for file in $(find /tmp/hawq/bin -name '*' -type f); do
+          if [[ $(file $file | grep Mach-O) ]]; then
+            install_name_tool -add_rpath /tmp/hawq/lib $file;
+          fi
+        done
         source /tmp/hawq/greenplum_path.sh
         postgres -V
         src/test/feature/feature-test --gtest_list_tests
+
+    - name: install HDFS
+      run: |
+        export HOMEBREW_NO_INSTALL_CLEANUP=1
+        brew install hadoop
+
+    - name: initialize macOS
+      run: .github/workflows/scripts/init_macos.sh
+
+    - name: initialize HDFS
+      run: |
+        export HADOOP_HOME=/usr/local/opt/hadoop/libexec
+        .github/workflows/scripts/init_hdfs.sh
+
+    - name: initialize HAWQ
+      run: |
+        source /tmp/hawq/greenplum_path.sh
+        .github/workflows/scripts/init_hawq.sh
+        psql -d postgres -c 'create database hawq_feature_test_db;'
+        src/test/feature/feature-test --gtest_filter=TestDatabase.BasicTest
diff --git a/.github/workflows/scripts/init_hawq.sh b/.github/workflows/scripts/init_hawq.sh
new file mode 100755
index 0000000..0793ebd
--- /dev/null
+++ b/.github/workflows/scripts/init_hawq.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -e
+
+
+
+# Configure
+tee $GPHOME/etc/hawq-site.xml << EOF_hawq_site
+<configuration>
+    <property>
+        <name>hawq_dfs_url</name>
+        <value>localhost:8020/hawq_default</value>
+        <description>URL for accessing HDFS.</description>
+    </property>
+    <property>
+        <name>hawq_master_address_host</name>
+        <value>localhost</value>
+    </property>
+    <property>
+        <name>hawq_master_address_port</name>
+        <value>5432</value>
+    </property>
+    <property>
+        <name>hawq_segment_address_port</name>
+        <value>40000</value>
+    </property>
+    <property>
+        <name>hawq_master_directory</name>
+        <value>/tmp/db_data/hawq-data-directory/masterdd</value>
+    </property>
+    <property>
+        <name>hawq_segment_directory</name>
+        <value>/tmp/db_data/hawq-data-directory/segmentdd</value>
+    </property>
+    <property>
+        <name>hawq_master_temp_directory</name>
+        <value>/tmp</value>
+    </property>
+    <property>
+        <name>hawq_segment_temp_directory</name>
+        <value>/tmp</value>
+    </property>
+</configuration>
+EOF_hawq_site
+
+# Initialize
+rm -rf /opt/dependency*
+rm -rf /tmp/db_data/hawq-data-directory
+install -d /tmp/db_data/hawq-data-directory/masterdd
+install -d /tmp/db_data/hawq-data-directory/segmentdd
+hawq init cluster -a
diff --git a/.github/workflows/scripts/init_hdfs.sh b/.github/workflows/scripts/init_hdfs.sh
new file mode 100755
index 0000000..0ab2094
--- /dev/null
+++ b/.github/workflows/scripts/init_hdfs.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -e
+
+
+
+# Configure
+tee $HADOOP_HOME/etc/hadoop/core-site.xml << EOF_core_site
+<configuration>
+    <property>
+        <name>fs.defaultFS</name>
+        <value>hdfs://localhost:8020</value>
+    </property>
+</configuration>
+EOF_core_site
+tee $HADOOP_HOME/etc/hadoop/hdfs-site.xml << EOF_hdfs_site
+<configuration>
+    <property>
+        <name>dfs.namenode.name.dir</name>
+        <value>file:///tmp/db_data/hdfs/name</value>
+    </property>
+    <property>
+        <name>dfs.datanode.data.dir</name>
+        <value>file:///tmp/db_data/hdfs/data</value>
+    </property>
+</configuration>
+EOF_hdfs_site
+
+# Initialize
+install -d /tmp/db_data/hdfs/name
+install -d /tmp/db_data/hdfs/data
+hdfs namenode -format
+
+# Start
+$HADOOP_HOME/sbin/start-dfs.sh
+
+# Connect
+hdfs dfsadmin -report
+hdfs dfs -ls /
diff --git a/.github/workflows/scripts/init_macos.sh b/.github/workflows/scripts/init_macos.sh
new file mode 100755
index 0000000..f02aa21
--- /dev/null
+++ b/.github/workflows/scripts/init_macos.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -e
+
+
+
+# Set up passphraseless ssh
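+# (Remote Login is macOS's switch for the built-in SSH server, sshd)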
+sudo systemsetup -setremotelogin on
+ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
+cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
+chmod 0700 ~/.ssh
+chmod 0600 ~/.ssh/authorized_keys
+
+tee -a ~/.ssh/config <<EOF_ssh_config
+Host *
+   StrictHostKeyChecking no
+   UserKnownHostsFile=/dev/null
+EOF_ssh_config
+
+ssh -v localhost whoami
+
+# Configure system kernel state
+sudo tee /etc/sysctl.conf << EOF_sysctl
+kern.sysv.shmmax=2147483648
+kern.sysv.shmmin=1
+kern.sysv.shmmni=64
+kern.sysv.shmseg=16
+kern.sysv.shmall=524288
+kern.maxfiles=65535
+kern.maxfilesperproc=65536
+kern.corefile=/cores/core.%N.%P
+EOF_sysctl
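+# Apply every key=value line in /etc/sysctl.conf immediately by feeding
+# the file to sysctl via xargs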
+</etc/sysctl.conf xargs sudo sysctl
+
+# Add data folder
+sudo install -o $USER -d /tmp/db_data/
diff --git a/depends/libhdfs3/CMakeLists.txt b/depends/libhdfs3/CMakeLists.txt
index 4694942..ba158ed 100644
--- a/depends/libhdfs3/CMakeLists.txt
+++ b/depends/libhdfs3/CMakeLists.txt
@@ -3,6 +3,7 @@ CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 PROJECT(libhdfs3)
 
 SET(CMAKE_VERBOSE_MAKEFILE ON CACHE STRING "Verbose build." FORCE)
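+# Give the built dylib an @rpath-relative install_name on macOS
+# (the behavior selected by CMake policy CMP0042)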
+SET(CMAKE_MACOSX_RPATH 1)
 
 IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR})
     MESSAGE(FATAL_ERROR "cannot build the project in the source directory! Out-of-source build is enforced!")
diff --git a/depends/libyarn/CMakeLists.txt b/depends/libyarn/CMakeLists.txt
index e7719d4..7ad2a81 100644
--- a/depends/libyarn/CMakeLists.txt
+++ b/depends/libyarn/CMakeLists.txt
@@ -4,6 +4,7 @@ PROJECT(libyarn)
 
 SET(CMAKE_VERBOSE_MAKEFILE ON CACHE STRING "Verbose build." FORCE)
 SET(CMAKE_FIND_ROOT_PATH "/Users/weikui/Documents/project/osx106_x86")
+SET(CMAKE_MACOSX_RPATH 1)
 
 IF(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR})
     MESSAGE(FATAL_ERROR "cannot build the project in the source directory! Out-of-source build is enforced!")
diff --git a/src/Makefile.global.in b/src/Makefile.global.in
index 51dffad..2b47748 100644
--- a/src/Makefile.global.in
+++ b/src/Makefile.global.in
@@ -221,8 +221,6 @@ COLLATEINDEX	= @COLLATEINDEX@
 
 CPP = @CPP@
 CPPFLAGS = @CPPFLAGS@
-CPPFLAGS += -I$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/include
-CPPFLAGS += -I$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/include
 CPPFLAGS += -I/usr/local/hawq/include
 
 ifdef PGXS
@@ -237,7 +235,9 @@ endif # not PGXS
 CC = @CC@
 GCC = @GCC@
 SUN_STUDIO_CC = @SUN_STUDIO_CC@
-CFLAGS = @CFLAGS@
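+# Put the self-contained libhdfs3/libyarn include paths ahead of
+# @CFLAGS@ so their headers are found first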
+CFLAGS = -I$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/include
+CFLAGS += -I$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/include
+CFLAGS += @CFLAGS@
 CFLAGS_SSE42 = @CFLAGS_SSE42@
 
 # Kind-of compilers
@@ -272,9 +272,9 @@ ifdef PGXS
 else
   LDFLAGS = -L$(top_builddir)/src/port
 endif
-LDFLAGS += @LDFLAGS@
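+# Link against the self-contained libhdfs3/libyarn libraries before
+# anything supplied via @LDFLAGS@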
 LDFLAGS += -L$(abs_top_srcdir)/depends/libhdfs3/build/install$(prefix)/lib
 LDFLAGS += -L$(abs_top_srcdir)/depends/libyarn/build/install$(prefix)/lib
+LDFLAGS += @LDFLAGS@
 
 LDFLAGS_EX = @LDFLAGS_EX@
 # LDFLAGS_SL might have already been assigned by calling makefile