You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@griffin.apache.org by gu...@apache.org on 2017/05/26 09:17:34 UTC

[4/9] incubator-griffin git commit: [GRIFFIN-19] update document of docker

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/data/gen-es-data.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/data/gen-es-data.sh b/docker/griffin_demo/prep/data/gen-es-data.sh
new file mode 100755
index 0000000..99821f4
--- /dev/null
+++ b/docker/griffin_demo/prep/data/gen-es-data.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493951823461, "total": 8927368, "matched": 8861175}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493955423461, "total": 9757306, "matched": 9753493}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493959023461, "total": 8614285, "matched": 8559842}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493962623461, "total": 8043288, "matched": 8034775}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493966223461, "total": 9360576, "matched": 9288744}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493969823461, "total": 8079795, "matched": 8078190}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493973423461, "total": 9479698, "matched": 9476094}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493977023461, "total": 8543483, "matched": 8524897}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493980623461, "total": 9449484, "matched": 9412128}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493984223461, "total": 9791098, "matched": 9735685}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493987823461, "total": 9194117, "matched": 9164237}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493991423461, "total": 9186464, "matched": 9183768}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493995023461, "total": 9429018, "matched": 9375324}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1493998623461, "total": 8740571, "matched": 8733743}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494002223461, "total": 8495330, "matched": 8461814}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494005823461, "total": 8054780, "matched": 8001438}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494009423461, "total": 8029660, "matched": 7979653}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494013023461, "total": 8705272, "matched": 8627610}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494016623461, "total": 8704313, "matched": 8684186}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "viewitem_hourly", "tmst": 1494020223461, "total": 8957171, "matched": 8868013}'
+
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493951823461, "total": 1067347, "matched": 1067346}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493955423461, "total": 1007213, "matched": 999576}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493959023461, "total": 1086389, "matched": 1083336}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493962623461, "total": 1071258, "matched": 1066583}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493966223461, "total": 1080928, "matched": 1080255}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493969823461, "total": 1012984, "matched": 1004201}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493973423461, "total": 1090650, "matched": 1090445}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493977023461, "total": 1056846, "matched": 1048078}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493980623461, "total": 1088940, "matched": 1079003}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493984223461, "total": 1056905, "matched": 1048087}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493987823461, "total": 1034134, "matched": 1030302}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493991423461, "total": 1024511, "matched": 1020197}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493995023461, "total": 1048833, "matched": 1047890}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1493998623461, "total": 1063914, "matched": 1060807}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494002223461, "total": 1090843, "matched": 1089507}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494005823461, "total": 1015360, "matched": 1012678}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494009423461, "total": 1040409, "matched": 1039279}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494013023461, "total": 1063349, "matched": 1055783}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494016623461, "total": 1031706, "matched": 1028600}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "search_hourly", "tmst": 1494020223461, "total": 1066126, "matched": 1062413}'
+
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493951823461, "total": 23224, "matched": 23028}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493955423461, "total": 29988, "matched": 29876}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493959023461, "total": 20840, "matched": 20687}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493962623461, "total": 20679, "matched": 20674}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493966223461, "total": 22713, "matched": 22623}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493969823461, "total": 24865, "matched": 24619}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493973423461, "total": 22017, "matched": 21818}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493977023461, "total": 25047, "matched": 25005}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493980623461, "total": 25149, "matched": 25033}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493984223461, "total": 25653, "matched": 25438}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493987823461, "total": 25829, "matched": 25815}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493991423461, "total": 25550, "matched": 25518}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493995023461, "total": 22669, "matched": 22656}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1493998623461, "total": 27805, "matched": 27668}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494002223461, "total": 20237, "matched": 20103}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494005823461, "total": 23782, "matched": 23551}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494009423461, "total": 22540, "matched": 22323}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494013023461, "total": 27692, "matched": 27691}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494016623461, "total": 27558, "matched": 27539}'
+curl -XPOST 'http://127.0.0.1:9200/griffin/accuracy' -d '{"name": "buy_hourly", "tmst": 1494020223461, "total": 22951, "matched": 22934}'

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/data/gen-hive-data.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/data/gen-hive-data.sh b/docker/griffin_demo/prep/data/gen-hive-data.sh
new file mode 100755
index 0000000..a972edd
--- /dev/null
+++ b/docker/griffin_demo/prep/data/gen-hive-data.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+#create table
+hive -f create-table.hql
+echo "create table done"
+
+#current hour
+cur_date=`date +%Y%m%d%H`
+dt=${cur_date:0:8}
+hour=${cur_date:8:2}
+partition_date="dt='$dt',hour='$hour'"
+sed s/PARTITION_DATE/$partition_date/ ./insert-data.hql.template > insert-data.hql
+hive -f insert-data.hql
+echo "insert data [$partition_date] done"
+
+#next hours
+set +e
+while true
+do
+  cur_date=`date +%Y%m%d%H`
+  next_date=`date -d "+1hour" '+%Y%m%d%H'`
+  dt=${next_date:0:8}
+  hour=${next_date:8:2}
+  partition_date="dt='$dt',hour='$hour'"
+  sed s/PARTITION_DATE/$partition_date/ ./insert-data.hql.template > insert-data.hql
+  hive -f insert-data.hql
+  echo "insert data [$partition_date] done"
+  sleep 3600
+done
+set -e

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/data/insert-data.hql.template
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/data/insert-data.hql.template b/docker/griffin_demo/prep/data/insert-data.hql.template
new file mode 100644
index 0000000..4e4039a
--- /dev/null
+++ b/docker/griffin_demo/prep/data/insert-data.hql.template
@@ -0,0 +1,2 @@
+LOAD DATA LOCAL INPATH 'demo_src' INTO TABLE demo_src PARTITION (PARTITION_DATE);
+LOAD DATA LOCAL INPATH 'demo_tgt' INTO TABLE demo_tgt PARTITION (PARTITION_DATE);

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/jar/griffin-measure-batch.jar.placeholder
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/jar/griffin-measure-batch.jar.placeholder b/docker/griffin_demo/prep/jar/griffin-measure-batch.jar.placeholder
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/job/config.json
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/job/config.json b/docker/griffin_demo/prep/job/config.json
new file mode 100644
index 0000000..9d09ef1
--- /dev/null
+++ b/docker/griffin_demo/prep/job/config.json
@@ -0,0 +1,27 @@
+{
+  "name": "avr_accu",
+  "type": "accuracy",
+
+  "source": {
+    "type": "hive",
+    "version": "1.2",
+    "config": {
+      "database": "default",
+      "table.name": "demo_src"
+    }
+  },
+
+  "target": {
+    "type": "hive",
+    "version": "1.2",
+    "config": {
+      "database": "default",
+      "table.name": "demo_tgt"
+    }
+  },
+
+  "evaluateRule": {
+    "sampleRatio": 1,
+    "rules": "$source.id = $target.id AND $source.age = $target.age AND $source.desc = $target.desc"
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/job/env.json.template
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/job/env.json.template b/docker/griffin_demo/prep/job/env.json.template
new file mode 100644
index 0000000..78870c6
--- /dev/null
+++ b/docker/griffin_demo/prep/job/env.json.template
@@ -0,0 +1,27 @@
+{
+  "spark": {
+    "log.level": "INFO",
+    "checkpoint.dir": "hdfs:///griffin/checkpoint",
+    "config": {}
+  },
+
+  "persist": [
+    {
+      "type": "hdfs",
+      "config": {
+        "path": "hdfs:///griffin/persist"
+      }
+    },
+    {
+      "type": "http",
+      "config": {
+        "method": "post",
+        "api": "http://HOSTNAME:9200/griffin/accuracy"
+      }
+    }
+  ],
+
+  "cleaner": {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/prepare.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/prepare.sh b/docker/griffin_demo/prep/prepare.sh
new file mode 100755
index 0000000..bd95d55
--- /dev/null
+++ b/docker/griffin_demo/prep/prepare.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Prepare the /griffin HDFS layout and upload the measure jar and job config.
+# 'mkdir -p' creates missing parents and does not fail when a directory
+# already exists, so the script is idempotent and safe to re-run.
+hadoop fs -mkdir -p /griffin/json
+hadoop fs -mkdir -p /griffin/persist
+hadoop fs -mkdir -p /griffin/checkpoint
+hadoop fs -mkdir -p /griffin/data/batch
+
+#jar file
+hadoop fs -put jar/griffin-measure-batch.jar /griffin/
+
+#data
+
+#service
+
+#job
+#hadoop fs -put job/env.json /griffin/json/
+hadoop fs -put job/config.json /griffin/json/

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/service/config/application.properties.template
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/service/config/application.properties.template b/docker/griffin_demo/prep/service/config/application.properties.template
new file mode 100644
index 0000000..84a48d3
--- /dev/null
+++ b/docker/griffin_demo/prep/service/config/application.properties.template
@@ -0,0 +1,22 @@
+spring.datasource.url= jdbc:mysql://HOSTNAME:3306/quartz
+spring.datasource.username =griffin
+spring.datasource.password =123456
+
+spring.datasource.driver-class-name=com.mysql.jdbc.Driver
+
+## Hibernate ddl auto (validate,create, create-drop, update)
+
+spring.jpa.hibernate.ddl-auto = create-drop
+spring.jpa.show-sql=true
+spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MySQL5Dialect
+#
+#
+## Naming strategy
+spring.jpa.hibernate.naming-strategy = org.hibernate.cfg.ImprovedNamingStrategy
+
+# hive metastore
+hive.metastore.uris = thrift://HOSTNAME:9083
+hive.metastore.dbname = default
+
+# kafka schema registry
+kafka.schema.registry.url = http://HOSTNAME:8181

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_demo/prep/service/service.jar.placeholder
----------------------------------------------------------------------
diff --git a/docker/griffin_demo/prep/service/service.jar.placeholder b/docker/griffin_demo/prep/service/service.jar.placeholder
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/Dockerfile
----------------------------------------------------------------------
diff --git a/docker/griffin_env/Dockerfile b/docker/griffin_env/Dockerfile
new file mode 100644
index 0000000..73e1df5
--- /dev/null
+++ b/docker/griffin_env/Dockerfile
@@ -0,0 +1,71 @@
+FROM ubuntu:14.04
+MAINTAINER org.apache.griffin
+
+#install wget, ssh, unzip
+RUN apt-get update && apt-get install wget -y && apt-get install openssh-client openssh-server -y && apt-get install unzip -y
+RUN mkdir /apache 
+WORKDIR /apache
+ADD prep /apache
+
+EXPOSE 2122
+
+#java
+RUN ./software-install.sh
+ADD conf /apache/conf
+RUN ./dir.sh && ./software-config.sh
+ENV JAVA_HOME /apache/jdk
+ENV HADOOP_HOME /apache/hadoop
+ENV HADOOP_INSTALL $HADOOP_HOME
+ENV HADOOP_MAPRED_HOME $HADOOP_HOME
+ENV HADOOP_COMMON_HOME $HADOOP_HOME
+ENV HADOOP_HDFS_HOME $HADOOP_HOME
+ENV YARN_HOME $HADOOP_HOME
+ENV HADOOP_COMMON_LIB_NATIVE_DIR $HADOOP_HOME/lib/native
+ENV SCALA_HOME /apache/scala
+ENV SPARK_HOME /apache/spark
+ENV HIVE_HOME /apache/hive
+ENV HADOOP_USER_CLASSPATH_FIRST true
+ENV LIVY_HOME /apache/livy
+
+ENV PATH $JAVA_HOME/bin:$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$SCALA_HOME/bin:$SPARK_HOME/bin:$HIVE_HOME/bin:$LIVY_HOME/bin
+
+EXPOSE 3306
+EXPOSE 9000 10020
+EXPOSE 50010 50020 50070 50075 50090
+EXPOSE 19888
+EXPOSE 8030 8031 8032 8033 8040 8042 8088
+EXPOSE 49707
+EXPOSE 9083 27017 6066
+EXPOSE 8080
+EXPOSE 8998
+EXPOSE 9200
+
+#ssh without password
+ENV HOME /root
+WORKDIR /root
+RUN ssh-keygen -q -N "" -t rsa -f /root/.ssh/id_rsa && cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys
+ADD ssh_config /root/.ssh/config
+RUN chmod 600 /root/.ssh/config && chown root:root /root/.ssh/config
+RUN sed  -i "/^[^#]*UsePAM/ s/.*/#&/"  /etc/ssh/sshd_config && echo "UsePAM no" >> /etc/ssh/sshd_config && echo "Port 2122" >> /etc/ssh/sshd_config
+
+#CURL
+RUN apt-get install curl -y
+
+#mysql init
+RUN DEBIAN_FRONTEND=noninteractive apt-get install mysql-server -y && DEBIAN_FRONTEND=noninteractive apt-get install libmysql-java -y
+RUN ln -s /usr/share/java/mysql-connector-java.jar $HIVE_HOME/lib/mysql-connector-java.jar && ln -s /usr/share/java/mysql.jar $HIVE_HOME/lib/mysql.jar
+RUN cd /apache/conf/mysql && cp bind_0.cnf /etc/mysql/conf.d/bind_0.cnf && ./mysql-init.sh
+
+#bootstrap
+ADD bootstrap.sh /etc/
+RUN chmod 755 /etc/bootstrap.sh
+
+#initial
+RUN hdfs namenode -format
+RUN /etc/bootstrap.sh && /apache/hdfs_file.sh && rm /apache/*.sh
+
+#bootstrap-all
+ADD bootstrap-all.sh /etc/
+RUN chmod 755 /etc/bootstrap-all.sh
+
+ENTRYPOINT ["/etc/bootstrap-all.sh"]

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/bootstrap-all.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_env/bootstrap-all.sh b/docker/griffin_env/bootstrap-all.sh
new file mode 100755
index 0000000..c3a11fc
--- /dev/null
+++ b/docker/griffin_env/bootstrap-all.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+$HADOOP_HOME/etc/hadoop/hadoop-env.sh
+rm /tmp/*.pid
+
+cd $HADOOP_HOME/share/hadoop/common ; for cp in ${ACP//,/ }; do  echo == $cp; curl -LO $cp ; done; cd -
+
+service mysql start
+
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/core-site.xml.template > $HADOOP_HOME/etc/hadoop/core-site.xml
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/yarn-site.xml.template > $HADOOP_HOME/etc/hadoop/yarn-site.xml
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/mapred-site.xml.template > $HADOOP_HOME/etc/hadoop/mapred-site.xml
+
+sed s/HOSTNAME/$HOSTNAME/ $HIVE_HOME/conf/hive-site.xml.template > $HIVE_HOME/conf/hive-site.xml
+
+/etc/init.d/ssh start
+
+start-dfs.sh
+start-yarn.sh
+mr-jobhistory-daemon.sh start historyserver
+
+
+$HADOOP_HOME/bin/hdfs dfsadmin -safemode wait
+
+
+hadoop fs -mkdir -p /home/spark_conf
+hadoop fs -put $HIVE_HOME/conf/hive-site.xml /home/spark_conf/
+echo "spark.yarn.dist.files		hdfs:///home/spark_conf/hive-site.xml" >> $SPARK_HOME/conf/spark-defaults.conf
+
+
+$SPARK_HOME/sbin/start-all.sh
+
+nohup hive --service metastore > metastore.log &
+
+nohup livy-server > livy.log &
+
+service elasticsearch start
+
+/bin/bash -c "bash"

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/bootstrap.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_env/bootstrap.sh b/docker/griffin_env/bootstrap.sh
new file mode 100755
index 0000000..197c6e3
--- /dev/null
+++ b/docker/griffin_env/bootstrap.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+$HADOOP_HOME/etc/hadoop/hadoop-env.sh
+rm /tmp/*.pid
+
+cd $HADOOP_HOME/share/hadoop/common ; for cp in ${ACP//,/ }; do  echo == $cp; curl -LO $cp ; done; cd -
+
+service mysql start
+
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/core-site.xml.template > $HADOOP_HOME/etc/hadoop/core-site.xml
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/yarn-site.xml.template > $HADOOP_HOME/etc/hadoop/yarn-site.xml
+sed s/HOSTNAME/$HOSTNAME/ $HADOOP_HOME/etc/hadoop/mapred-site.xml.template > $HADOOP_HOME/etc/hadoop/mapred-site.xml
+
+sed s/HOSTNAME/$HOSTNAME/ $HIVE_HOME/conf/hive-site.xml.template > $HIVE_HOME/conf/hive-site.xml
+
+/etc/init.d/ssh start
+
+start-dfs.sh
+start-yarn.sh
+mr-jobhistory-daemon.sh start historyserver
+
+$HADOOP_HOME/bin/hdfs dfsadmin -safemode leave
+
+
+/bin/bash -c "bash"

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/elasticsearch/elasticsearch
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/elasticsearch/elasticsearch b/docker/griffin_env/conf/elasticsearch/elasticsearch
new file mode 100755
index 0000000..ecaaf5f
--- /dev/null
+++ b/docker/griffin_env/conf/elasticsearch/elasticsearch
@@ -0,0 +1,207 @@
+#!/bin/bash
+#
+# /etc/init.d/elasticsearch -- startup script for Elasticsearch
+#
+### BEGIN INIT INFO
+# Provides:          elasticsearch
+# Required-Start:    $network $remote_fs $named
+# Required-Stop:     $network $remote_fs $named
+# Default-Start:     2 3 4 5
+# Default-Stop:      0 1 6
+# Short-Description: Starts elasticsearch
+# Description:       Starts elasticsearch using start-stop-daemon
+### END INIT INFO
+
+#PATH=/bin:/usr/bin:/sbin:/usr/sbin
+PATH=$PATH
+NAME=elasticsearch
+DESC="Elasticsearch Server"
+DEFAULT=/etc/default/$NAME
+
+if [ `id -u` -ne 0 ]; then
+	echo "You need root privileges to run this script"
+	exit 1
+fi
+
+
+. /lib/lsb/init-functions
+
+if [ -r /etc/default/rcS ]; then
+	. /etc/default/rcS
+fi
+
+
+# The following variables can be overwritten in $DEFAULT
+
+# Run Elasticsearch as this user ID and group ID
+ES_USER=elasticsearch
+ES_GROUP=elasticsearch
+
+# Directory where the Elasticsearch binary distribution resides
+ES_HOME=/usr/share/$NAME
+
+# Additional Java OPTS
+#ES_JAVA_OPTS=
+
+# Maximum number of open files
+MAX_OPEN_FILES=65536
+
+# Maximum amount of locked memory
+#MAX_LOCKED_MEMORY=
+
+# Elasticsearch log directory
+LOG_DIR=/var/log/$NAME
+
+# Elasticsearch data directory
+DATA_DIR=/var/lib/$NAME
+
+# Elasticsearch configuration directory
+CONF_DIR=/etc/$NAME
+
+# Maximum number of VMA (Virtual Memory Areas) a process can own
+MAX_MAP_COUNT=262144
+
+# Elasticsearch PID file directory
+PID_DIR="/var/run/elasticsearch"
+
+# End of variables that can be overwritten in $DEFAULT
+
+# overwrite settings from default file
+if [ -f "$DEFAULT" ]; then
+	. "$DEFAULT"
+fi
+
+# CONF_FILE setting was removed
+if [ ! -z "$CONF_FILE" ]; then
+    echo "CONF_FILE setting is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed."
+    exit 1
+fi
+
+# Define other required variables
+PID_FILE="$PID_DIR/$NAME.pid"
+DAEMON=$ES_HOME/bin/elasticsearch
+DAEMON_OPTS="-d -p $PID_FILE -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR"
+
+export ES_JAVA_OPTS
+export JAVA_HOME
+export ES_INCLUDE
+export ES_JVM_OPTIONS
+
+# export unsupported variables so bin/elasticsearch can reject them and inform the user these are unsupported
+if test -n "$ES_MIN_MEM"; then export ES_MIN_MEM; fi
+if test -n "$ES_MAX_MEM"; then export ES_MAX_MEM; fi
+if test -n "$ES_HEAP_SIZE"; then export ES_HEAP_SIZE; fi
+if test -n "$ES_HEAP_NEWSIZE"; then export ES_HEAP_NEWSIZE; fi
+if test -n "$ES_DIRECT_SIZE"; then export ES_DIRECT_SIZE; fi
+if test -n "$ES_USE_IPV4"; then export ES_USE_IPV4; fi
+if test -n "$ES_GC_OPTS"; then export ES_GC_OPTS; fi
+if test -n "$ES_GC_LOG_FILE"; then export ES_GC_LOG_FILE; fi
+
+if [ ! -x "$DAEMON" ]; then
+	echo "The elasticsearch startup script does not exists or it is not executable, tried: $DAEMON"
+	exit 1
+fi
+
+checkJava() {
+	if [ -x "$JAVA_HOME/bin/java" ]; then
+		JAVA="$JAVA_HOME/bin/java"
+	else
+		JAVA=`which java`
+	fi
+
+	if [ ! -x "$JAVA" ]; then
+		echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
+		exit 1
+	fi
+}
+
+# Dispatch on the requested init action.
+case "$1" in
+  start)
+	checkJava
+
+	log_daemon_msg "Starting $DESC"
+
+	pid=`pidofproc -p $PID_FILE elasticsearch`
+	if [ -n "$pid" ] ; then
+		log_begin_msg "Already running."
+		log_end_msg 0
+		exit 0
+	fi
+
+	# Ensure that the PID_DIR exists (it is cleaned at OS startup time)
+	if [ -n "$PID_DIR" ] && [ ! -e "$PID_DIR" ]; then
+		mkdir -p "$PID_DIR" && chown "$ES_USER":"$ES_GROUP" "$PID_DIR"
+	fi
+	if [ -n "$PID_FILE" ] && [ ! -e "$PID_FILE" ]; then
+		touch "$PID_FILE" && chown "$ES_USER":"$ES_GROUP" "$PID_FILE"
+	fi
+
+	if [ -n "$MAX_OPEN_FILES" ]; then
+		ulimit -n $MAX_OPEN_FILES
+	fi
+
+	if [ -n "$MAX_LOCKED_MEMORY" ]; then
+		ulimit -l $MAX_LOCKED_MEMORY
+	fi
+
+	if [ -n "$MAX_MAP_COUNT" -a -f /proc/sys/vm/max_map_count ]; then
+		sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
+	fi
+
+	# Start Daemon
+	start-stop-daemon -d $ES_HOME --start --user "$ES_USER" -c "$ES_USER" --pidfile "$PID_FILE" --exec $DAEMON -- $DAEMON_OPTS
+	return=$?
+	if [ $return -eq 0 ]; then
+		i=0
+		timeout=10
+		# Wait for the process to be properly started before exiting
+		until { kill -0 `cat "$PID_FILE"`; } >/dev/null 2>&1
+		do
+			sleep 1
+			i=$(($i + 1))
+			if [ $i -gt $timeout ]; then
+				log_end_msg 1
+				exit 1
+			fi
+		done
+	fi
+	log_end_msg $return
+	exit $return
+	;;
+  stop)
+	log_daemon_msg "Stopping $DESC"
+
+	if [ -f "$PID_FILE" ]; then
+		start-stop-daemon --stop --pidfile "$PID_FILE" \
+			--user "$ES_USER" \
+			--quiet \
+			--retry forever/TERM/20 > /dev/null
+		# BUGFIX: capture the daemon's exit status exactly once. The original
+		# tested "$?" twice, so the elif saw the status of the first [ test
+		# (always 0 or 1, never 3), leaving the failure branch unreachable.
+		rv=$?
+		if [ $rv -eq 1 ]; then
+			log_progress_msg "$DESC is not running but pid file exists, cleaning up"
+		elif [ $rv -eq 3 ]; then
+			PID="`cat $PID_FILE`"
+			log_failure_msg "Failed to stop $DESC (pid $PID)"
+			exit 1
+		fi
+		rm -f "$PID_FILE"
+	else
+		log_progress_msg "(not running)"
+	fi
+	log_end_msg 0
+	;;
+  status)
+	status_of_proc -p $PID_FILE elasticsearch elasticsearch && exit 0 || exit $?
+	;;
+  restart|force-reload)
+	if [ -f "$PID_FILE" ]; then
+		$0 stop
+	fi
+	$0 start
+	;;
+  *)
+	log_success_msg "Usage: $0 {start|stop|restart|force-reload|status}"
+	exit 1
+	;;
+esac
+
+exit 0

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/elasticsearch/elasticsearch.yml
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/elasticsearch/elasticsearch.yml b/docker/griffin_env/conf/elasticsearch/elasticsearch.yml
new file mode 100644
index 0000000..fb51093
--- /dev/null
+++ b/docker/griffin_env/conf/elasticsearch/elasticsearch.yml
@@ -0,0 +1,11 @@
+network.host: 0.0.0.0
+http.cors.enabled: true
+http.cors.allow-origin: "*"
+node.name: "griffin"
+cluster.name: griffin-elas
+path.data: /data/elasticsearch
+
+indices.fielddata.cache.size: 40%
+indices.breaker.fielddata.limit: 60%
+indices.breaker.request.limit: 40%
+indices.breaker.total.limit: 70%

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/core-site.xml.template
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/core-site.xml.template b/docker/griffin_env/conf/hadoop/core-site.xml.template
new file mode 100644
index 0000000..1a4cc8d
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/core-site.xml.template
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+<property>
+        <name>fs.defaultFS</name>
+        <value>hdfs://HOSTNAME:9000</value>
+    </property>
+	<property>
+		<name>hadoop.tmp.dir</name>
+		<value>file:///data/hadoop-data/tmp</value>
+		<description>Abase for other temporary directories.</description>
+	</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/hadoop-env.sh
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/hadoop-env.sh b/docker/griffin_env/conf/hadoop/hadoop-env.sh
new file mode 100644
index 0000000..099391d
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/hadoop-env.sh
@@ -0,0 +1,99 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.
+#export JAVA_HOME=${JAVA_HOME}
+export JAVA_HOME=/apache/jdk
+
+# The jsvc implementation to use. Jsvc is required to run secure datanodes
+# that bind to privileged ports to provide authentication of data transfer
+# protocol.  Jsvc is not required if SASL is configured for authentication of
+# data transfer protocol using non-privileged ports.
+#export JSVC_HOME=${JSVC_HOME}
+
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
+
+# Extra Java CLASSPATH elements.  Automatically insert capacity-scheduler.
+for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
+  if [ "$HADOOP_CLASSPATH" ]; then
+    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
+  else
+    export HADOOP_CLASSPATH=$f
+  fi
+done
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+#export HADOOP_HEAPSIZE=
+#export HADOOP_NAMENODE_INIT_HEAPSIZE=""
+
+# Extra Java runtime options.  Empty by default.
+export HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
+
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
+
+export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
+export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
+#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
+
+# On secure datanodes, user to run the datanode as after dropping privileges.
+# This **MUST** be uncommented to enable secure HDFS if using privileged ports
+# to provide authentication of data transfer protocol.  This **MUST NOT** be
+# defined if SASL is configured for authentication of data transfer protocol
+# using non-privileged ports.
+export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+#export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
+
+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
+
+###
+# HDFS Mover specific parameters
+###
+# Specify the JVM options to be used when starting the HDFS Mover.
+# These options will be appended to the options specified as HADOOP_OPTS
+# and therefore may override any similar flags set in HADOOP_OPTS
+#
+# export HADOOP_MOVER_OPTS=""
+
+###
+# Advanced Users Only!
+###
+
+# The directory where pid files are stored. /tmp by default.
+# NOTE: this should be set to a directory that can only be written to by 
+#       the user that will run the hadoop daemons.  Otherwise there is the
+#       potential for a symlink attack.
+export HADOOP_PID_DIR=${HADOOP_PID_DIR}
+export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/hdfs-site.xml b/docker/griffin_env/conf/hadoop/hdfs-site.xml
new file mode 100644
index 0000000..8caff3c
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/hdfs-site.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<!-- HDFS settings for the single-node griffin_env Docker container. -->
+<configuration>
+<property>
+        <name>dfs.replication</name>
+        <value>1</value> <!-- single node: keep only one replica of each block -->
+    </property>
+    <property>
+        <name>dfs.name.dir</name> <!-- NOTE(review): deprecated alias of dfs.namenode.name.dir -->
+        <value>file:///data/hadoop-data/nn</value> <!-- NameNode metadata directory -->
+    </property>
+    <property>
+        <name>dfs.data.dir</name> <!-- NOTE(review): deprecated alias of dfs.datanode.data.dir -->
+        <value>file:///data/hadoop-data/dn</value> <!-- DataNode block storage directory -->
+    </property>
+    <property>
+        <name>dfs.namenode.checkpoint.dir</name>
+        <value>file:///data/hadoop-data/snn</value> <!-- secondary-namenode checkpoint directory -->
+    </property>
+	<property>
+		<name>dfs.datanode.use.datanode.hostname</name>
+		<value>false</value> <!-- clients reach datanodes by IP rather than hostname -->
+	</property>
+	<property>
+		<name>dfs.namenode.datanode.registration.ip-hostname-check</name>
+		<value>false</value> <!-- skip reverse-DNS check at datanode registration; presumably needed for the Docker network - confirm -->
+	</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/mapred-site.xml.template
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/mapred-site.xml.template b/docker/griffin_env/conf/hadoop/mapred-site.xml.template
new file mode 100644
index 0000000..a7d7b45
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/mapred-site.xml.template
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<!-- MapReduce settings template; HOSTNAME is presumably substituted when the container is provisioned - confirm. -->
+<configuration>
+<property>
+        <name>mapreduce.framework.name</name>
+        <value>yarn</value> <!-- run MapReduce jobs on YARN -->
+    </property>
+	<property>
+		<name>mapreduce.jobhistory.address</name> <!-- fix: trailing space inside <name> made the property unrecognized -->
+		<value>HOSTNAME:10020</value> <!-- JobHistory server IPC endpoint -->
+	</property>
+	<property>
+		<name>mapreduce.jobhistory.webapp.address</name>
+		<value>HOSTNAME:19888</value> <!-- JobHistory web UI endpoint -->
+	</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/slaves
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/slaves b/docker/griffin_env/conf/hadoop/slaves
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/slaves
@@ -0,0 +1 @@
+localhost

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/2de6a549/docker/griffin_env/conf/hadoop/yarn-site.xml.template
----------------------------------------------------------------------
diff --git a/docker/griffin_env/conf/hadoop/yarn-site.xml.template b/docker/griffin_env/conf/hadoop/yarn-site.xml.template
new file mode 100644
index 0000000..109bc4d
--- /dev/null
+++ b/docker/griffin_env/conf/hadoop/yarn-site.xml.template
@@ -0,0 +1,65 @@
+<?xml version="1.0"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<!-- YARN settings template; HOSTNAME is presumably substituted when the container is provisioned - confirm. -->
+<configuration>
+
+<!-- Site specific YARN configuration properties -->
+
+<property>
+        <name>yarn.resourcemanager.hostname</name>
+        <value>HOSTNAME</value> <!-- ResourceManager runs on the same host in this single-node setup -->
+    </property>
+    <property>
+        <name>yarn.nodemanager.aux-services</name>
+        <value>mapreduce_shuffle</value> <!-- enable the MapReduce shuffle auxiliary service -->
+    </property>
+    <property>
+        <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
+        <value>org.apache.hadoop.mapred.ShuffleHandler</value> <!-- handler implementing the shuffle service above -->
+    </property>
+	<property>
+		<name>yarn.nodemanager.log-dirs</name>
+		<value>/tmp/logs</value> <!-- local container log directory -->
+	</property>
+	<property>
+		<name>yarn.log-aggregation-enable</name>
+		<value>true</value> <!-- collect finished-application logs into the remote dir below -->
+	</property>
+	<property>
+		<name>yarn.nodemanager.remote-app-log-dir</name>
+		<value>/yarn-logs/logs</value> <!-- aggregated-log destination; presumably an HDFS path - confirm -->
+	</property>
+	<property>
+		<name>yarn.nodemanager.remote-app-log-dir-suffix</name>
+		<value>logs</value>
+	</property>
+	<property>
+		<name>yarn.log-aggregation.retain-seconds</name>
+		<value>360000</value> <!-- keep aggregated logs for 360000 s = 100 hours -->
+    </property>
+	<property>
+		<name>yarn.log.server.url</name>
+		<value>http://HOSTNAME:19888/jobhistory/logs</value> <!-- must match the JobHistory webapp address in mapred-site -->
+    </property>
+<!-- for java 8 -->
+<property>
+    <name>yarn.nodemanager.pmem-check-enabled</name>
+    <value>false</value> <!-- do not kill containers that exceed physical-memory limits -->
+</property>
+
+<property>
+    <name>yarn.nodemanager.vmem-check-enabled</name>
+    <value>false</value> <!-- do not kill containers that exceed virtual-memory limits -->
+</property>
+</configuration>