You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by xx...@apache.org on 2020/11/08 08:13:02 UTC
[kylin] 02/13: KYLIN-4775 Use docker-compose to deploy Hadoop and
Kylin
This is an automated email from the ASF dual-hosted git repository.
xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit c23238811aa66cf972555db8cd6ee16006d5f4af
Author: yongheng.liu <li...@gmail.com>
AuthorDate: Mon Oct 19 20:05:59 2020 +0800
KYLIN-4775 Use docker-compose to deploy Hadoop and Kylin
---
docker/.gitignore | 6 +
docker/build_cluster_images.sh | 111 +++++------
.../client.env => others/client-write-read.env} | 2 +-
.../{write/client.env => others/client-write.env} | 11 +-
.../others/docker-compose-kylin-write-read.yml | 69 +++++++
.../others/docker-compose-kylin-write.yml | 69 +++++++
.../docker-compose-metastore.yml} | 14 +-
.../docker-compose/read/docker-compose-hadoop.yml | 129 +++++++++++++
.../docker-compose/read/docker-compose-hbase.yml | 43 +++++
.../docker-compose/write/docker-compose-hadoop.yml | 128 ++++++++++++
.../docker-compose/write/docker-compose-hbase.yml | 43 +++++
.../docker-compose/write/docker-compose-hive.yml | 37 ++++
.../docker-compose/write/docker-compose-write.yml | 215 ---------------------
docker/docker-compose/write/write-hadoop.env | 6 +-
docker/dockerfile/cluster/base/Dockerfile | 4 +-
docker/dockerfile/cluster/client/Dockerfile | 2 +-
docker/dockerfile/cluster/hive/Dockerfile | 2 +
docker/dockerfile/cluster/hive/run_hv.sh | 4 +
docker/header.sh | 141 ++++++++++++++
docker/setup_cluster.sh | 26 +--
docker/stop_cluster.sh | 56 ++++--
21 files changed, 791 insertions(+), 327 deletions(-)
diff --git a/docker/.gitignore b/docker/.gitignore
new file mode 100644
index 0000000..db4d255
--- /dev/null
+++ b/docker/.gitignore
@@ -0,0 +1,6 @@
+docker-compose/others/data/
+docker-compose/read/data/
+docker-compose/write/data/
+docker-compose/others/kylin/kylin-all/
+docker-compose/others/kylin/kylin-job/
+docker-compose/others/kylin/kylin-query/
diff --git a/docker/build_cluster_images.sh b/docker/build_cluster_images.sh
index ac60533..b2aae80 100644
--- a/docker/build_cluster_images.sh
+++ b/docker/build_cluster_images.sh
@@ -1,57 +1,32 @@
#!/bin/bash
-
-ARGS=`getopt -o h:i:b --long hadoop_version:,hive_version:,hbase_version: -n 'parameter.bash' -- "$@"`
-
-if [ $? != 0 ]; then
- echo "Terminating..."
- exit 1
-fi
-
-eval set -- "${ARGS}"
-
-HADOOP_VERSION="2.8.5"
-HIVE_VERSION="1.2.2"
-HBASE_VERSION="1.1.2"
-
-while true;
-do
- case "$1" in
- --hadoop_version)
- HADOOP_VERSION=$2;
- shift 2;
- ;;
- --hive_version)
- HIVE_VERSION=$2;
- shift 2;
- ;;
- --hbase_version)
- HBASE_VERSION=$2;
- shift 2;
- ;;
- --)
- break
- ;;
- *)
- echo "Internal error!"
- break
- ;;
- esac
-done
-
-for arg in $@
-do
- echo "processing $arg"
-done
-
-echo "........hadoop version: "$HADOOP_VERSION
-echo "........hive version: "$HIVE_VERSION
-echo "........hbase version: "$HBASE_VERSION
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SCRIPT_PATH=$(cd `dirname $0`; pwd)
+WS_ROOT=`dirname $SCRIPT_PATH`
+
+source ${SCRIPT_PATH}/header.sh
#docker build -t apachekylin/kylin-metastore:mysql_5.6.49 ./kylin/metastore-db
+#
docker build -t apachekylin/kylin-hadoop-base:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/base
-docker build -t apachekylin/kylin-hadoop-namenode:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/namenode
-docker build -t apachekylin/kylin-hadoop-datanode:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/datanode
+docker build -t apachekylin/kylin-hadoop-namenode:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} --build-arg HADOOP_WEBHDFS_PORT=${HADOOP_WEBHDFS_PORT} ./dockerfile/cluster/namenode
+docker build -t apachekylin/kylin-hadoop-datanode:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} --build-arg HADOOP_DN_PORT=${HADOOP_DN_PORT} ./dockerfile/cluster/datanode
docker build -t apachekylin/kylin-hadoop-resourcemanager:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/resourcemanager
docker build -t apachekylin/kylin-hadoop-nodemanager:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/nodemanager
docker build -t apachekylin/kylin-hadoop-historyserver:hadoop_${HADOOP_VERSION} --build-arg HADOOP_VERSION=${HADOOP_VERSION} ./dockerfile/cluster/historyserver
@@ -61,29 +36,29 @@ docker build -t apachekylin/kylin-hive:hive_${HIVE_VERSION}_hadoop_${HADOOP_VERS
--build-arg HADOOP_VERSION=${HADOOP_VERSION} \
./dockerfile/cluster/hive
-docker build -t apachekylin/kylin-hbase-base:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hbase
-docker build -t apachekylin/kylin-hbase-master:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hmaster
-docker build -t apachekylin/kylin-hbase-regionserver:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hregionserver
+if [ $ENABLE_HBASE == "yes" ]; then
+ docker build -t apachekylin/kylin-hbase-base:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hbase
+ docker build -t apachekylin/kylin-hbase-master:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hmaster
+ docker build -t apachekylin/kylin-hbase-regionserver:hbase_${HBASE_VERSION} --build-arg HBASE_VERSION=${HBASE_VERSION} ./dockerfile/cluster/hregionserver
+fi
-docker build -t apachekylin/kylin-kerberos:latest ./dockerfile/cluster/kerberos
+if [ $ENABLE_KERBEROS == "yes" ]; then
+ docker build -t apachekylin/kylin-kerberos:latest ./dockerfile/cluster/kerberos
+fi
+
+if [ $ENABLE_LDAP == "yes" ]; then
+ docker pull osixia/openldap:1.3.0
+fi
+
+#if [ $ENABLE_KAFKA == "yes" ]; then
+# docker pull bitnami/kafka:2.0.0
+#fi
+docker pull bitnami/kafka:2.0.0
+
+docker pull mysql:5.6.49
docker build -t apachekylin/kylin-client:hadoop_${HADOOP_VERSION}_hive_${HIVE_VERSION}_hbase_${HBASE_VERSION} \
--build-arg HIVE_VERSION=${HIVE_VERSION} \
--build-arg HADOOP_VERSION=${HADOOP_VERSION} \
--build-arg HBASE_VERSION=${HBASE_VERSION} \
./dockerfile/cluster/client
-
-
-export HADOOP_NAMENODE_IMAGETAG=apachekylin/kylin-hadoop-base:hadoop_${HADOOP_VERSION}
-export HADOOP_DATANODE_IMAGETAG=apachekylin/kylin-hadoop-datanode:hadoop_${HADOOP_VERSION}
-export HADOOP_NAMENODE_IMAGETAG=apachekylin/kylin-hadoop-namenode:hadoop_${HADOOP_VERSION}
-export HADOOP_RESOURCEMANAGER_IMAGETAG=apachekylin/kylin-hadoop-resourcemanager:hadoop_${HADOOP_VERSION}
-export HADOOP_NODEMANAGER_IMAGETAG=apachekylin/kylin-hadoop-nodemanager:hadoop_${HADOOP_VERSION}
-export HADOOP_HISTORYSERVER_IMAGETAG=apachekylin/kylin-hadoop-historyserver:hadoop_${HADOOP_VERSION}
-export HIVE_IMAGETAG=apachekylin/kylin-hive:hive_${HIVE_VERSION}_hadoop_${HADOOP_VERSION}
-export HBASE_MASTER_IMAGETAG=apachekylin/kylin-hbase-base:hbase_${HBASE_VERSION}
-export HBASE_MASTER_IMAGETAG=apachekylin/kylin-hbase-master:hbase_${HBASE_VERSION}
-export HBASE_REGIONSERVER_IMAGETAG=apachekylin/kylin-hbase-regionserver:hbase_${HBASE_VERSION}
-export CLIENT_IMAGETAG=apachekylin/kylin-client:hadoop_${HADOOP_VERSION}_hive_${HIVE_VERSION}_hbase_${HBASE_VERSION}
-export KERBEROS_IMAGE=apachekylin/kylin-kerberos:latest
-
diff --git a/docker/docker-compose/write-read/client.env b/docker/docker-compose/others/client-write-read.env
similarity index 97%
rename from docker/docker-compose/write-read/client.env
rename to docker/docker-compose/others/client-write-read.env
index fc0743c..c61e986 100644
--- a/docker/docker-compose/write-read/client.env
+++ b/docker/docker-compose/others/client-write-read.env
@@ -39,7 +39,7 @@ MAPRED_CONF_mapreduce_reduce_memory_mb=8192
MAPRED_CONF_mapreduce_map_java_opts=-Xmx3072m
MAPRED_CONF_mapreduce_reduce_java_opts=-Xmx6144m
-HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore
+HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore?useSSL=false\&allowPublicKeyRetrieval=true
HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=com.mysql.jdbc.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=kylin
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=kylin
diff --git a/docker/docker-compose/write/client.env b/docker/docker-compose/others/client-write.env
similarity index 91%
rename from docker/docker-compose/write/client.env
rename to docker/docker-compose/others/client-write.env
index fc0743c..edad60b 100644
--- a/docker/docker-compose/write/client.env
+++ b/docker/docker-compose/others/client-write.env
@@ -39,19 +39,18 @@ MAPRED_CONF_mapreduce_reduce_memory_mb=8192
MAPRED_CONF_mapreduce_map_java_opts=-Xmx3072m
MAPRED_CONF_mapreduce_reduce_java_opts=-Xmx6144m
-HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore
+HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore?useSSL=false\&allowPublicKeyRetrieval=true
HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=com.mysql.jdbc.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=kylin
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=kylin
HIVE_SITE_CONF_datanucleus_autoCreateSchema=true
HIVE_SITE_CONF_hive_metastore_uris=thrift://write-hive-metastore:9083
-HBASE_CONF_hbase_rootdir=hdfs://read-namenode:8020/hbase
+HBASE_CONF_hbase_rootdir=hdfs://write-namenode:8020/hbase
HBASE_CONF_hbase_cluster_distributed=true
-HBASE_CONF_hbase_zookeeper_quorum=read-zookeeper
-
-HBASE_CONF_hbase_master=read-hbase-master:16000
-HBASE_CONF_hbase_master_hostname=read-hbase-master
+HBASE_CONF_hbase_zookeeper_quorum=write-zookeeper
+HBASE_CONF_hbase_master=write-hbase-master:16000
+HBASE_CONF_hbase_master_hostname=write-hbase-master
HBASE_CONF_hbase_master_port=16000
HBASE_CONF_hbase_master_info_port=16010
HBASE_CONF_hbase_regionserver_port=16020
diff --git a/docker/docker-compose/others/docker-compose-kylin-write-read.yml b/docker/docker-compose/others/docker-compose-kylin-write-read.yml
new file mode 100644
index 0000000..cb67b06
--- /dev/null
+++ b/docker/docker-compose/others/docker-compose-kylin-write-read.yml
@@ -0,0 +1,69 @@
+version: "3.3"
+
+services:
+ kylin-all:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-all
+ hostname: kylin-all
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-all:/opt/kylin/kylin-all
+ env_file:
+ - client-write-read.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-all
+ networks:
+ - write_kylin
+ ports:
+ - 7070:7070
+
+ kylin-job:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-job
+ hostname: kylin-job
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-job:/opt/kylin/kylin-job
+ env_file:
+ - client-write-read.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-job
+ networks:
+ - write_kylin
+ ports:
+ - 7071:7070
+
+ kylin-query:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-query
+ hostname: kylin-query
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-query:/opt/kylin/kylin-query
+ env_file:
+ - client-write-read.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-query
+ networks:
+ - write_kylin
+ ports:
+ - 7072:7070
+
+networks:
+ write_kylin:
+ external: true
\ No newline at end of file
diff --git a/docker/docker-compose/others/docker-compose-kylin-write.yml b/docker/docker-compose/others/docker-compose-kylin-write.yml
new file mode 100644
index 0000000..a78b88a
--- /dev/null
+++ b/docker/docker-compose/others/docker-compose-kylin-write.yml
@@ -0,0 +1,69 @@
+version: "3.3"
+
+services:
+ kylin-all:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-all
+ hostname: kylin-all
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-all:/opt/kylin/kylin-all
+ env_file:
+ - client-write.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-all
+ networks:
+ - write_kylin
+ ports:
+ - 7070:7070
+
+ kylin-job:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-job
+ hostname: kylin-job
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-job:/opt/kylin/kylin-job
+ env_file:
+ - client-write.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-job
+ networks:
+ - write_kylin
+ ports:
+ - 7071:7070
+
+ kylin-query:
+ image: ${CLIENT_IMAGETAG}
+ container_name: kylin-query
+ hostname: kylin-query
+ volumes:
+ - ./conf/hadoop:/etc/hadoop/conf
+ - ./conf/hbase:/etc/hbase/conf
+ - ./conf/hive:/etc/hive/conf
+ - ./kylin/kylin-query:/opt/kylin/kylin-query
+ env_file:
+ - client-write.env
+ environment:
+ HADOOP_CONF_DIR: /etc/hadoop/conf
+ HIVE_CONF_DIR: /etc/hive/conf
+ HBASE_CONF_DIR: /etc/hbase/conf
+ KYLIN_HOME: /opt/kylin/kylin-query
+ networks:
+ - write_kylin
+ ports:
+ - 7072:7070
+
+networks:
+ write_kylin:
+ external: true
\ No newline at end of file
diff --git a/docker/docker-compose/write-read/test-docker-compose-mysql.yml b/docker/docker-compose/others/docker-compose-metastore.yml
similarity index 54%
rename from docker/docker-compose/write-read/test-docker-compose-mysql.yml
rename to docker/docker-compose/others/docker-compose-metastore.yml
index 5906c1e..a36df07 100644
--- a/docker/docker-compose/write-read/test-docker-compose-mysql.yml
+++ b/docker/docker-compose/others/docker-compose-metastore.yml
@@ -1,16 +1,24 @@
-
version: "3.3"
services:
metastore-db:
- image: mysql:5.6.49
+# image: mysql:5.6.49
+# image: mysql:8.0.11
+ image: mysql:5.7.24
container_name: metastore-db
hostname: metastore-db
volumes:
- ./data/mysql:/var/lib/mysql
environment:
- MYSQL_ROOT_PASSWORD=kylin
- - MYSQL_DATABASE=kylin
+ - MYSQL_DATABASE=metastore
- MYSQL_USER=kylin
- MYSQL_PASSWORD=kylin
+ networks:
+ - write_kylin
+ ports:
+ - 3306:3306
+networks:
+ write_kylin:
+ external: true
diff --git a/docker/docker-compose/read/docker-compose-hadoop.yml b/docker/docker-compose/read/docker-compose-hadoop.yml
new file mode 100644
index 0000000..a0e2a66
--- /dev/null
+++ b/docker/docker-compose/read/docker-compose-hadoop.yml
@@ -0,0 +1,129 @@
+version: "3.3"
+
+services:
+ read-namenode:
+ image: ${HADOOP_NAMENODE_IMAGETAG:-apachekylin/kylin-hadoop-namenode:hadoop_2.8.5}
+ container_name: read-namenode
+ hostname: read-namenode
+ volumes:
+ - ./data/write_hadoop_namenode:/hadoop/dfs/name
+ environment:
+ - CLUSTER_NAME=test-kylin
+ - HADOOP_WEBHDFS_PORT=${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ expose:
+ - 8020
+ ports:
+ - 50071:50070
+ - 9871:9870
+
+ read-datanode1:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: read-datanode1
+ hostname: read-datanode1
+ volumes:
+ - ./data/write_hadoop_datanode1:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ links:
+ - read-namenode
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ read-datanode2:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: read-datanode2
+ hostname: read-datanode2
+ volumes:
+ - ./data/write_hadoop_datanode2:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ read-datanode3:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: read-datanode3
+ hostname: read-datanode3
+ volumes:
+ - ./data/write_hadoop_datanode3:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ read-resourcemanager:
+ image: ${HADOOP_RESOURCEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-resourcemanager:hadoop_2.8.5}
+ container_name: read-resourcemanager
+ hostname: read-resourcemanager
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ ports:
+ - 8089:8088
+
+ read-nodemanager1:
+ image: ${HADOOP_NODEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-nodemanager:hadoop_2.8.5}
+ container_name: read-nodemanager1
+ hostname: read-nodemanager1
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+
+ read-nodemanager2:
+ image: ${HADOOP_NODEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-nodemanager:hadoop_2.8.5}
+ container_name: read-nodemanager2
+ hostname: read-nodemanager2
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+
+ read-historyserver:
+ image: ${HADOOP_HISTORYSERVER_IMAGETAG:-apachekylin/kylin-hadoop-historyserver:hadoop_2.8.5}
+ container_name: read-historyserver
+ hostname: read-historyserver
+ volumes:
+ - ./data/write_hadoop_historyserver:/hadoop/yarn/timeline
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - read-hadoop.env
+ networks:
+ - write_kylin
+ ports:
+ - 8189:8188
+
+networks:
+ write_kylin:
+ external: true
\ No newline at end of file
diff --git a/docker/docker-compose/read/docker-compose-hbase.yml b/docker/docker-compose/read/docker-compose-hbase.yml
new file mode 100644
index 0000000..ac4048b
--- /dev/null
+++ b/docker/docker-compose/read/docker-compose-hbase.yml
@@ -0,0 +1,43 @@
+version: "3.3"
+
+services:
+ read-hbase-master:
+ image: ${HBASE_MASTER_IMAGETAG:-apachekylin/kylin-hbase-master:hbase1.1.2}
+ container_name: read-hbase-master
+ hostname: read-hbase-master
+ env_file:
+ - read-hbase-distributed-local.env
+ environment:
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-zookeeper:2181"
+ networks:
+ - write_kylin
+ ports:
+ - 16010:16010
+
+ read-hbase-regionserver1:
+ image: ${HBASE_REGIONSERVER_IMAGETAG:-apachekylin/kylin-hbase-regionserver:hbase_1.1.2}
+ container_name: read-hbase-regionserver1
+ hostname: read-hbase-regionserver1
+ env_file:
+ - read-hbase-distributed-local.env
+ environment:
+ HBASE_CONF_hbase_regionserver_hostname: read-hbase-regionserver1
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-zookeeper:2181 read-hbase-master:16010"
+ networks:
+ - write_kylin
+
+ read-hbase-regionserver2:
+ image: ${HBASE_REGIONSERVER_IMAGETAG:-apachekylin/kylin-hbase-regionserver:hbase_1.1.2}
+ container_name: read-hbase-regionserver2
+ hostname: read-hbase-regionserver2
+ env_file:
+ - read-hbase-distributed-local.env
+ environment:
+ HBASE_CONF_hbase_regionserver_hostname: read-hbase-regionserver2
+ SERVICE_PRECONDITION: "read-namenode:${HADOOP_WEBHDFS_PORT:-50070} read-datanode1:${HADOOP_DN_PORT:-50075} read-datanode2:${HADOOP_DN_PORT:-50075} read-datanode3:${HADOOP_DN_PORT:-50075} read-zookeeper:2181 read-hbase-master:16010"
+ networks:
+ - write_kylin
+
+networks:
+ write_kylin:
+ external: true
diff --git a/docker/docker-compose/write/docker-compose-hadoop.yml b/docker/docker-compose/write/docker-compose-hadoop.yml
new file mode 100644
index 0000000..4286cfc
--- /dev/null
+++ b/docker/docker-compose/write/docker-compose-hadoop.yml
@@ -0,0 +1,128 @@
+version: "3.3"
+
+services:
+ write-namenode:
+ image: ${HADOOP_NAMENODE_IMAGETAG:-apachekylin/kylin-hadoop-namenode:hadoop_2.8.5}
+ container_name: write-namenode
+ hostname: write-namenode
+ volumes:
+ - ./data/write_hadoop_namenode:/hadoop/dfs/name
+ environment:
+ - CLUSTER_NAME=test-kylin
+ - HADOOP_WEBHDFS_PORT=${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ expose:
+ - 8020
+ ports:
+ - 50070:50070
+ - 9870:9870
+
+ write-datanode1:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: write-datanode1
+ hostname: write-datanode1
+ volumes:
+ - ./data/write_hadoop_datanode1:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ links:
+ - write-namenode
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ write-datanode2:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: write-datanode2
+ hostname: write-datanode2
+ volumes:
+ - ./data/write_hadoop_datanode2:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ write-datanode3:
+ image: ${HADOOP_DATANODE_IMAGETAG:-apachekylin/kylin-hadoop-datanode:hadoop_2.8.5}
+ container_name: write-datanode3
+ hostname: write-datanode3
+ volumes:
+ - ./data/write_hadoop_datanode3:/hadoop/dfs/data
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ expose:
+ - ${HADOOP_DN_PORT:-50075}
+
+ write-resourcemanager:
+ image: ${HADOOP_RESOURCEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-resourcemanager:hadoop_2.8.5}
+ container_name: write-resourcemanager
+ hostname: write-resourcemanager
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075}"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ ports:
+ - 8088:8088
+
+ write-nodemanager1:
+ image: ${HADOOP_NODEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-nodemanager:hadoop_2.8.5}
+ container_name: write-nodemanager1
+ hostname: write-nodemanager1
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+
+ write-nodemanager2:
+ image: ${HADOOP_NODEMANAGER_IMAGETAG:-apachekylin/kylin-hadoop-nodemanager:hadoop_2.8.5}
+ container_name: write-nodemanager2
+ hostname: write-nodemanager2
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+
+ write-historyserver:
+ image: ${HADOOP_HISTORYSERVER_IMAGETAG:-apachekylin/kylin-hadoop-historyserver:hadoop_2.8.5}
+ container_name: write-historyserver
+ hostname: write-historyserver
+ volumes:
+ - ./data/write_hadoop_historyserver:/hadoop/yarn/timeline
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-resourcemanager:8088"
+ HADOOP_WEBHDFS_PORT: ${HADOOP_WEBHDFS_PORT:-50070}
+ env_file:
+ - write-hadoop.env
+ networks:
+ - kylin
+ ports:
+ - 8188:8188
+
+networks:
+ kylin:
\ No newline at end of file
diff --git a/docker/docker-compose/write/docker-compose-hbase.yml b/docker/docker-compose/write/docker-compose-hbase.yml
new file mode 100644
index 0000000..d95f32b
--- /dev/null
+++ b/docker/docker-compose/write/docker-compose-hbase.yml
@@ -0,0 +1,43 @@
+version: "3.3"
+
+services:
+ write-hbase-master:
+ image: ${HBASE_MASTER_IMAGETAG:-apachekylin/kylin-hbase-master:hbase1.1.2}
+ container_name: write-hbase-master
+ hostname: write-hbase-master
+ env_file:
+ - write-hbase-distributed-local.env
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-zookeeper:2181"
+ networks:
+ - write_kylin
+ ports:
+ - 16010:16010
+
+ write-hbase-regionserver1:
+ image: ${HBASE_REGIONSERVER_IMAGETAG:-apachekylin/kylin-hbase-regionserver:hbase_1.1.2}
+ container_name: write-hbase-regionserver1
+ hostname: write-hbase-regionserver1
+ env_file:
+ - write-hbase-distributed-local.env
+ environment:
+ HBASE_CONF_hbase_regionserver_hostname: write-hbase-regionserver1
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-zookeeper:2181 write-hbase-master:16010"
+ networks:
+ - write_kylin
+
+ write-hbase-regionserver2:
+ image: ${HBASE_REGIONSERVER_IMAGETAG:-apachekylin/kylin-hbase-regionserver:hbase_1.1.2}
+ container_name: write-hbase-regionserver2
+ hostname: write-hbase-regionserver2
+ env_file:
+ - write-hbase-distributed-local.env
+ environment:
+ HBASE_CONF_hbase_regionserver_hostname: write-hbase-regionserver2
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} write-zookeeper:2181 write-hbase-master:16010"
+ networks:
+ - write_kylin
+
+networks:
+ write_kylin:
+ external: true
diff --git a/docker/docker-compose/write/docker-compose-hive.yml b/docker/docker-compose/write/docker-compose-hive.yml
new file mode 100644
index 0000000..9b94a34
--- /dev/null
+++ b/docker/docker-compose/write/docker-compose-hive.yml
@@ -0,0 +1,37 @@
+version: "3.3"
+
+services:
+ write-hive-server:
+ image: ${HIVE_IMAGETAG:-apachekylin/kylin-hive:hive_1.2.2_hadoop_2.8.5}
+ container_name: write-hive-server
+ hostname: write-hive-server
+ env_file:
+ - write-hadoop.env
+ environment:
+ HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:mysql://metastore-db/metastore"
+ SERVICE_PRECONDITION: "write-hive-metastore:9083"
+ HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName: com.mysql.jdbc.Driver
+ networks:
+ - write_kylin
+ ports:
+ - 10000:10000
+
+ write-hive-metastore:
+ image: ${HIVE_IMAGETAG:-apachekylin/kylin-hive:hive_1.2.2_hadoop_2.8.5}
+ container_name: write-hive-metastore
+ hostname: write-hive-metastore
+ env_file:
+ - write-hadoop.env
+ environment:
+ SERVICE_PRECONDITION: "write-namenode:${HADOOP_WEBHDFS_PORT:-50070} write-datanode1:${HADOOP_DN_PORT:-50075} write-datanode2:${HADOOP_DN_PORT:-50075} write-datanode3:${HADOOP_DN_PORT:-50075} metastore-db:3306"
+ HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName: com.mysql.jdbc.Driver
+ command: /opt/hive/bin/hive --service metastore
+ networks:
+ - write_kylin
+ expose:
+ - 9083
+
+networks:
+ write_kylin:
+ external: true
+
diff --git a/docker/docker-compose/write/docker-compose-write.yml b/docker/docker-compose/write/docker-compose-write.yml
deleted file mode 100644
index aefe726..0000000
--- a/docker/docker-compose/write/docker-compose-write.yml
+++ /dev/null
@@ -1,215 +0,0 @@
-version: "3.3"
-
-services:
- write-namenode:
- image: ${HADOOP_NAMENODE_IMAGETAG:-bde2020/hadoop-namenode:2.0.0-hadoop2.7.4-java8}
- container_name: write-namenode
- hostname: write-namenode
- volumes:
- - ./data/write_hadoop_namenode:/hadoop/dfs/name
- environment:
- - CLUSTER_NAME=test-write
- env_file:
- - write-hadoop.env
- expose:
- - 8020
- ports:
- - 50070:50070
-
- write-datanode1:
- image: ${HADOOP_DATANODE_IMAGETAG:-bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8}
- container_name: write-datanode1
- hostname: write-datanode1
- volumes:
- - ./data/write_hadoop_datanode1:/hadoop/dfs/data
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070"
- env_file:
- - write-hadoop.env
- links:
- - write-namenode
-
- write-datanode2:
- image: ${HADOOP_DATANODE_IMAGETAG:-bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8}
- container_name: write-datanode2
- hostname: write-datanode2
- volumes:
- - ./data/write_hadoop_datanode2:/hadoop/dfs/data
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070"
- env_file:
- - write-hadoop.env
-
- write-datanode3:
- image: ${HADOOP_DATANODE_IMAGETAG:-bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8}
- container_name: write-datanode3
- hostname: write-datanode3
- volumes:
- - ./data/write_hadoop_datanode3:/hadoop/dfs/data
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070"
- env_file:
- - write-hadoop.env
-
- write-resourcemanager:
- image: ${HADOOP_RESOURCEMANAGER_IMAGETAG:-bde2020/hadoop-resourcemanager:2.0.0-hadoop2.7.4-java8}
- container_name: write-resourcemanager
- hostname: write-resourcemanager
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075"
- env_file:
- - write-hadoop.env
- ports:
- - 8088:8088
-
- write-nodemanager1:
- image: ${HADOOP_NODEMANAGER_IMAGETAG:-bde2020/hadoop-nodemanager:2.0.0-hadoop2.7.4-java8}
- container_name: write-nodemanager1
- hostname: write-nodemanager1
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-resourcemanager:8088"
- env_file:
- - write-hadoop.env
-
- write-nodemanager2:
- image: ${HADOOP_NODEMANAGER_IMAGETAG:-bde2020/hadoop-nodemanager:2.0.0-hadoop2.7.4-java8}
- container_name: write-nodemanager2
- hostname: write-nodemanager2
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-resourcemanager:8088"
- env_file:
- - write-hadoop.env
-
- write-historyserver:
- image: ${HADOOP_HISTORYSERVER_IMAGETAG:-bde2020/hadoop-historyserver:2.0.0-hadoop2.7.4-java8}
- container_name: write-historyserver
- hostname: write-historyserver
- volumes:
- - ./data/write_hadoop_historyserver:/hadoop/yarn/timeline
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-resourcemanager:8088"
- env_file:
- - write-hadoop.env
- ports:
- - 8188:8188
-
- write-hive-server:
- image: ${HIVE_IMAGETAG:-apachekylin/kylin-hive:hive_1.2.2_hadoop_2.8.5}
- container_name: write-hive-server
- hostname: write-hive-server
- env_file:
- - write-hadoop.env
- environment:
-# HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://write-hive-metastore/metastore"
- HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:mysql://metastore-db/metastore"
- SERVICE_PRECONDITION: "write-hive-metastore:9083"
- ports:
- - 10000:10000
-
- write-hive-metastore:
-# image: ${HIVE_IMAGETAG:-bde2020/hive:2.3.2-postgresql-metastore}
- image: ${HIVE_IMAGETAG:-apachekylin/kylin-hive:hive_1.2.2_hadoop_2.8.5}
- container_name: write-hive-metastore
- hostname: write-hive-metastore
- env_file:
- - write-hadoop.env
- command: /opt/hive/bin/hive --service metastore
- expose:
- - 9083
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 metastore-db:3306"
-# SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-hive-metastore-postgresql:5432"
-
-# write-hive-metastore-postgresql:
-# image: bde2020/hive-metastore-postgresql:2.3.0
-# container_name: write-hive-metastore-postgresql
-# hostname: write-hive-metastore-postgresql
-
- metastore-db:
- image: mysql:5.6.49
- container_name: metastore-db
- hostname: metastore-db
- volumes:
- - ./data/mysql:/var/lib/mysql
- environment:
- - MYSQL_ROOT_PASSWORD=kylin
- - MYSQL_DATABASE=metastore
- - MYSQL_USER=kylin
- - MYSQL_PASSWORD=kylin
- ports:
- - 3306:3306
-
- write-zookeeper:
- image: ${ZOOKEEPER_IMAGETAG:-zookeeper:3.4.10}
- container_name: write-zookeeper
- hostname: write-zookeeper
- environment:
- ZOO_MY_ID: 1
- ZOO_SERVERS: server.1=0.0.0.0:2888:3888
- ports:
- - 2181:2181
-
- write-kafka:
- image: ${KAFKA_IMAGETAG:-bitnami/kafka:2.0.0}
- container_name: write-kafkabroker
- hostname: write-kafkabroker
- environment:
- - KAFKA_ZOOKEEPER_CONNECT=write-zookeeper:2181
- - ALLOW_PLAINTEXT_LISTENER=yes
- ports:
- - 9092:9092
-
- kerberos-kdc:
- image: ${KERBEROS_IMAGE}
- container_name: kerberos-kdc
- hostname: kerberos-kdc
-
- write-hbase-master:
- image: ${HBASE_MASTER_IMAGETAG:-bde2020/hbase-master:1.0.0-hbase1.2.6}
- container_name: write-hbase-master
- hostname: write-hbase-master
- env_file:
- - write-hbase-distributed-local.env
- environment:
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-zookeeper:2181"
- ports:
- - 16010:16010
-
- write-hbase-regionserver1:
- image: ${HBASE_REGIONSERVER_IMAGETAG:-bde2020/hbase-regionserver:1.0.0-hbase1.2.6}
- container_name: write-hbase-regionserver1
- hostname: write-hbase-regionserver1
- env_file:
- - write-hbase-distributed-local.env
- environment:
- HBASE_CONF_hbase_regionserver_hostname: write-hbase-regionserver1
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-zookeeper:2181 write-hbase-master:16010"
-
- write-hbase-regionserver2:
- image: ${HBASE_REGIONSERVER_IMAGETAG:-bde2020/hbase-regionserver:1.0.0-hbase1.2.6}
- container_name: write-hbase-regionserver2
- hostname: write-hbase-regionserver2
- env_file:
- - write-hbase-distributed-local.env
- environment:
- HBASE_CONF_hbase_regionserver_hostname: write-hbase-regionserver2
- SERVICE_PRECONDITION: "write-namenode:50070 write-datanode1:50075 write-datanode2:50075 write-datanode3:50075 write-zookeeper:2181 write-hbase-master:16010"
-
- kylin-all:
- image: ${CLIENT_IMAGETAG}
- container_name: kylin-all
- hostname: kylin-all
- volumes:
- - ./conf/hadoop:/etc/hadoop/conf
- - ./conf/hbase:/etc/hbase/conf
- - ./conf/hive:/etc/hive/conf
- - ./kylin:/opt/kylin/
- env_file:
- - client.env
- environment:
- HADOOP_CONF_DIR: /etc/hadoop/conf
- HIVE_CONF_DIR: /etc/hive/conf
- HBASE_CONF_DIR: /etc/hbase/conf
- KYLIN_HOME: /opt/kylin/kylin
- ports:
- - 7070:7070
diff --git a/docker/docker-compose/write/write-hadoop.env b/docker/docker-compose/write/write-hadoop.env
index 8ec98c9..ef4429a 100644
--- a/docker/docker-compose/write/write-hadoop.env
+++ b/docker/docker-compose/write/write-hadoop.env
@@ -39,9 +39,11 @@ MAPRED_CONF_mapreduce_reduce_memory_mb=8192
MAPRED_CONF_mapreduce_map_java_opts=-Xmx3072m
MAPRED_CONF_mapreduce_reduce_java_opts=-Xmx6144m
-HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore
-HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=com.mysql.jdbc.Driver
+HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:mysql://metastore-db/metastore?useSSL=false\&allowPublicKeyRetrieval=true
+HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=com.mysql.cj.jdbc.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=kylin
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=kylin
HIVE_SITE_CONF_datanucleus_autoCreateSchema=true
+HIVE_SITE_CONF_datanucleus_schema_autoCreateAll=true
+HIVE_SITE_CONF_hive_metastore_schema_verification=false
HIVE_SITE_CONF_hive_metastore_uris=thrift://write-hive-metastore:9083
\ No newline at end of file
diff --git a/docker/dockerfile/cluster/base/Dockerfile b/docker/dockerfile/cluster/base/Dockerfile
index ccc05b3..8cf5ff0 100644
--- a/docker/dockerfile/cluster/base/Dockerfile
+++ b/docker/dockerfile/cluster/base/Dockerfile
@@ -52,13 +52,13 @@ RUN wget --no-cookies --no-check-certificate --header "Cookie: gpw_e24=http%3A%2
RUN set -x \
&& echo "Fetch URL2 is : ${HADOOP_URL}" \
&& curl -fSL "${HADOOP_URL}" -o /tmp/hadoop.tar.gz \
- && curl -fSL "${HADOOP_URL}.asc" -o /tmp/hadoop.tar.gz.asc \
+ && curl -fSL "${HADOOP_URL}.asc" -o /tmp/hadoop.tar.gz.asc
RUN set -x \
&& tar -xvf /tmp/hadoop.tar.gz -C /opt/ \
&& rm /tmp/hadoop.tar.gz* \
&& ln -s /opt/hadoop-$HADOOP_VERSION/etc/hadoop /etc/hadoop \
- && cp /etc/hadoop/mapred-site.xml.template /etc/hadoop/mapred-site.xml \
+ && if [ -e "/etc/hadoop/mapred-site.xml.template" ]; then cp /etc/hadoop/mapred-site.xml.template /etc/hadoop/mapred-site.xml ;fi \
&& mkdir -p /opt/hadoop-$HADOOP_VERSION/logs \
&& mkdir /hadoop-data
diff --git a/docker/dockerfile/cluster/client/Dockerfile b/docker/dockerfile/cluster/client/Dockerfile
index 38cbbac..46c1822 100644
--- a/docker/dockerfile/cluster/client/Dockerfile
+++ b/docker/dockerfile/cluster/client/Dockerfile
@@ -96,7 +96,7 @@ RUN chmod a+x /opt/entrypoint/kafka/entrypoint.sh
RUN set -x \
&& ln -s /opt/hadoop-$HADOOP_VERSION/etc/hadoop /etc/hadoop \
- && cp /etc/hadoop/mapred-site.xml.template /etc/hadoop/mapred-site.xml \
+ && if [ -e "/etc/hadoop/mapred-site.xml.template" ]; then cp /etc/hadoop/mapred-site.xml.template /etc/hadoop/mapred-site.xml ;fi \
&& mkdir -p /opt/hadoop-$HADOOP_VERSION/logs
RUN ln -s /opt/hbase-$HBASE_VERSION/conf /etc/hbase
diff --git a/docker/dockerfile/cluster/hive/Dockerfile b/docker/dockerfile/cluster/hive/Dockerfile
index 46f81f4..c3f11e5 100644
--- a/docker/dockerfile/cluster/hive/Dockerfile
+++ b/docker/dockerfile/cluster/hive/Dockerfile
@@ -49,6 +49,8 @@ RUN echo "Hive URL is :${HIVE_URL}" \
&& wget https://jdbc.postgresql.org/download/postgresql-9.4.1212.jar -O $HIVE_HOME/lib/postgresql-jdbc.jar \
&& rm hive.tar.gz
+RUN if [[ $HADOOP_VERSION > "3" ]]; then rm -rf $HIVE_HOME/lib/guava-* ; cp $HADOOP_HOME/share/hadoop/common/lib/guava-* $HIVE_HOME/lib; fi
+
#Custom configuration goes here
ADD conf/hive-site.xml $HIVE_HOME/conf
ADD conf/beeline-log4j2.properties $HIVE_HOME/conf
diff --git a/docker/dockerfile/cluster/hive/run_hv.sh b/docker/dockerfile/cluster/hive/run_hv.sh
index 675937f..fcc3547 100644
--- a/docker/dockerfile/cluster/hive/run_hv.sh
+++ b/docker/dockerfile/cluster/hive/run_hv.sh
@@ -22,5 +22,9 @@ hadoop fs -mkdir -p /user/hive/warehouse
hadoop fs -chmod g+w /tmp
hadoop fs -chmod g+w /user/hive/warehouse
+if [[ $HIVE_VERSION > "2" ]]; then
+ schematool -dbType mysql -initSchema
+fi
+
cd $HIVE_HOME/bin
./hiveserver2 --hiveconf hive.server2.enable.doAs=false
diff --git a/docker/header.sh b/docker/header.sh
new file mode 100644
index 0000000..a990d90
--- /dev/null
+++ b/docker/header.sh
@@ -0,0 +1,141 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ARGS=`getopt -o h:i:b:c:a:l:k:f:p --long hadoop_version:,hive_version:,hbase_version:,cluster_mode:,enable_hbase:,enable_ldap:,enable_kerberos:,enable_kafka:,help -n 'parameter.bash' -- "$@"`
+
+if [ $? != 0 ]; then
+ echo "Terminating..."
+ exit 1
+fi
+
+eval set -- "${ARGS}"
+
+HADOOP_VERSION="2.8.5"
+HIVE_VERSION="1.2.2"
+HBASE_VERSION="1.1.2"
+
+# write write-read
+CLUSTER_MODE="write"
+# yes,no
+ENABLE_HBASE="yes"
+# yes,no
+ENABLE_LDAP="no"
+# yes,no
+ENABLE_KERBEROS="no"
+#
+ENABLE_KAFKA="no"
+
+while true;
+do
+ case "$1" in
+ --hadoop_version)
+ HADOOP_VERSION=$2;
+ shift 2;
+ ;;
+ --hive_version)
+ HIVE_VERSION=$2;
+ shift 2;
+ ;;
+ --hbase_version)
+ HBASE_VERSION=$2;
+ shift 2;
+ ;;
+ --cluster_mode)
+ CLUSTER_MODE=$2;
+ shift 2;
+ ;;
+ --enable_hbase)
+ ENABLE_HBASE=$2;
+ shift 2;
+ ;;
+ --enable_ldap)
+ ENABLE_LDAP=$2;
+ shift 2;
+ ;;
+ --enable_kerberos)
+ ENABLE_KERBEROS=$2;
+ shift 2;
+ ;;
+ --enable_kafka)
+ ENABLE_KAFKA=$2;
+ shift 2;
+ ;;
+ --help)
+cat << EOF
+----------------------menu------------------------
+--hadoop_version hadoop version,default is 2.8.5
+--hive_version hive version,default is 1.2.2
+--hbase_version hbase version,default is 1.1.2
+--cluster_mode cluster mode, optional value : [write, write-read],default is write
+--enable_hbase whether enable hbase server, optional value : [yes, no], default is yes
+--enable_ldap whether enable ldap server, optional value : [yes, no], default is no
+--enable_kerberos whether enable kerberos server, optional value : [yes, no], default is no
+--enable_kafka whether enable kafka server, optional value : [yes, no], default is no
+EOF
+ exit 0
+ ;;
+ --)
+ break
+ ;;
+ *)
+ echo "Internal error!"
+ break
+ ;;
+ esac
+done
+
+for arg in $@
+do
+ echo "processing $arg"
+done
+
+echo "........hadoop version: "$HADOOP_VERSION
+echo "........hive version: "$HIVE_VERSION
+echo "........hbase version: "$HBASE_VERSION
+echo "........cluster_mode: "${CLUSTER_MODE}
+echo "........enable hbase: "${ENABLE_HBASE}
+echo "........enable ldap: "${ENABLE_LDAP}
+echo "........enable kerberos: "${ENABLE_KERBEROS}
+
+export HBASE_VERSION=$HBASE_VERSION
+export HADOOP_VERSION=$HADOOP_VERSION
+export HIVE_VERSION=$HIVE_VERSION
+
+export HADOOP_BASE_IMAGETAG=apachekylin/kylin-hadoop-base:hadoop_${HADOOP_VERSION}
+export HADOOP_DATANODE_IMAGETAG=apachekylin/kylin-hadoop-datanode:hadoop_${HADOOP_VERSION}
+export HADOOP_NAMENODE_IMAGETAG=apachekylin/kylin-hadoop-namenode:hadoop_${HADOOP_VERSION}
+export HADOOP_RESOURCEMANAGER_IMAGETAG=apachekylin/kylin-hadoop-resourcemanager:hadoop_${HADOOP_VERSION}
+export HADOOP_NODEMANAGER_IMAGETAG=apachekylin/kylin-hadoop-nodemanager:hadoop_${HADOOP_VERSION}
+export HADOOP_HISTORYSERVER_IMAGETAG=apachekylin/kylin-hadoop-historyserver:hadoop_${HADOOP_VERSION}
+export HIVE_IMAGETAG=apachekylin/kylin-hive:hive_${HIVE_VERSION}_hadoop_${HADOOP_VERSION}
+
+export HBASE_BASE_IMAGETAG=apachekylin/kylin-hbase-base:hbase_${HBASE_VERSION}
+export HBASE_MASTER_IMAGETAG=apachekylin/kylin-hbase-master:hbase_${HBASE_VERSION}
+export HBASE_REGIONSERVER_IMAGETAG=apachekylin/kylin-hbase-regionserver:hbase_${HBASE_VERSION}
+
+export KAFKA_IMAGE=bitnami/kafka:2.0.0
+export LDAP_IMAGE=osixia/openldap:1.3.0
+export CLIENT_IMAGETAG=apachekylin/kylin-client:hadoop_${HADOOP_VERSION}_hive_${HIVE_VERSION}_hbase_${HBASE_VERSION}
+
+if [[ $HADOOP_VERSION < "3" ]]; then
+ export HADOOP_WEBHDFS_PORT=50070
+ export HADOOP_DN_PORT=50075
+else
+ export HADOOP_WEBHDFS_PORT=9870
+ export HADOOP_DN_PORT=9864
+fi
diff --git a/docker/setup_cluster.sh b/docker/setup_cluster.sh
index 0e3a260..e7ae80f 100644
--- a/docker/setup_cluster.sh
+++ b/docker/setup_cluster.sh
@@ -1,20 +1,20 @@
#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
# limitations under the License.
+#
SCRIPT_PATH=$(cd `dirname $0`; pwd)
WS_ROOT=`dirname $SCRIPT_PATH`
diff --git a/docker/stop_cluster.sh b/docker/stop_cluster.sh
index 87f0ac4..24ce4e8 100644
--- a/docker/stop_cluster.sh
+++ b/docker/stop_cluster.sh
@@ -1,23 +1,47 @@
#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
# limitations under the License.
+#
SCRIPT_PATH=$(cd `dirname $0`; pwd)
-# set up root directory
WS_ROOT=`dirname $SCRIPT_PATH`
-# shut down cluster
-KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-write.yml down
+
+source ${SCRIPT_PATH}/header.sh
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/others/docker-compose-kylin-write.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/others/docker-compose-kylin-write-read.yml down
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/read/docker-compose-zookeeper.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/read/docker-compose-hadoop.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/read/docker-compose-hbase.yml down
+
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-kafka.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-hbase.yml down
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-hive.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-zookeeper.yml down
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/others/docker-compose-kerberos.yml down
+# KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/others/docker-compose-ldap.yml down
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/others/docker-compose-metastore.yml down
+
+KYLIN_WS=${WS_ROOT} docker-compose -f ${SCRIPT_PATH}/docker-compose/write/docker-compose-hadoop.yml down
+
+# clean data
+#rm -rf ${SCRIPT_PATH}/docker-compose/write/data/*
+#rm -rf ${SCRIPT_PATH}/docker-compose/read/data/*
+#rm -rf ${SCRIPT_PATH}/docker-compose/others/data/*