You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airavata.apache.org by la...@apache.org on 2016/09/13 06:18:14 UTC

[14/15] airavata git commit: [AIRAVATA-2054][WIP] create docker images for airavata deployment components

[AIRAVATA-2054][WIP] create docker images for airavata deployment components

1. Introduce Docker images for each deployment component of airavata.
2. Deployed those in docker hub repository (scigap),
	try: docker search scigap
3. Use Exhibitor docker images instead of plain zookeeper, which is a much
better option compared to using vanilla zookeeper.
http://techblog.netflix.com/2012/04/introducing-exhibitor-supervisor-system.html

4. IMHO we should never run docker images straight from a public repository.
Instead, we should build our own docker images on top of the public base
images, test with those, and then move them to production.

5. Added a simple script(airavata/build.sh) to build airavata docker components.

      ./build.sh [component-name] - This will build a docker image for given component.

This is a temporary script we can use until AIRAVATA-2056 which integrates docker push with some CI tool like jenkins.


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/6ec88cfd
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/6ec88cfd
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/6ec88cfd

Branch: refs/heads/lahiru/AIRAVATA-2065
Commit: 6ec88cfd7d846fdd1175dba4b941ec49dc292999
Parents: 3986ec7
Author: Lahiru Ginnaliya Gamathige <la...@apache.org>
Authored: Sun Aug 21 00:49:58 2016 -0700
Committer: Lahiru Ginnaliya Gamathige <la...@apache.org>
Committed: Mon Sep 12 22:43:24 2016 -0700

----------------------------------------------------------------------
 build.sh                                        |  65 ++++++++++
 deploy/images/airavata/Dockerfile               |  16 +++
 deploy/images/exhibitor/Dockerfile              |   6 +
 deploy/images/exhibitor/exhibitor.properties    |  14 +++
 deploy/images/java/Dockerfile                   |  32 +++++
 deploy/images/kafka/Dockerfile                  |   8 ++
 deploy/images/kafka/start-kafka.sh              |  17 +++
 deploy/images/logstash/Dockerfile               |   5 +
 deploy/images/logstash/elasticsearchpolicy      |  27 ++++
 deploy/images/logstash/logstash-elastic.conf    |  94 ++++++++++++++
 .../logstash-output-amazon_es-0.3-java.gem      | Bin 0 -> 22016 bytes
 deploy/images/logstash/logstash.conf            |  97 +++++++++++++++
 deploy/images/rabbitmq/Dockerfile               |   3 +
 deploy/images/rabbitmq/rabbitmq.config          |   5 +
 deploy/systemd/aws-metadata.service             |  19 +++
 deploy/systemd/consul-download.service          |  11 ++
 deploy/systemd/consul.service                   |   8 ++
 deploy/systemd/kafka-manager.service            |  15 +++
 deploy/systemd/kafka.service                    |  13 ++
 deploy/systemd/logstash.service                 |  16 +++
 deploy/systemd/zookeeper.service                |  14 +++
 .../common/logging/kafka/KafkaAppender.java     | 122 +++++++++++++++++++
 pom.xml                                         |  97 ++++++++++++++-
 23 files changed, 703 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/build.sh
----------------------------------------------------------------------
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000..da3c5b7
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,65 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# todo Add environment specific docker image creation and create docker image per component (api-server, orchestrator, gfac etc)
+
+echo $MAVEN_HOME
+echo $PATH
+
+cd $WORKSPACE/airavata-head/
+
+/home/jenkins/tools/maven/apache-maven-3.3.9/bin/mvn  clean install -Dmaven.test.skip=true
+if [ -d "docker-build" ]; then
+    printf '%s\n' "Removing old docker-build directory"
+    rm -rf docker-build
+fi
+
+mkdir docker-build
+cp modules/distribution/target/apache-airavata-server*.zip docker-build
+
+unzip docker-build/apache-airavata-server*.zip -d docker-build/airavata
+rm docker-build/apache-airavata-server*.zip
+
+cp deploy/images/airavata/Dockerfile docker-build/airavata/*/
+
+cd docker-build/airavata/*/
+
+# disable embedded zookeeper configuration
+echo  embedded.zk=false >> bin/airavata-server.properties
+
+component_name="all"
+if [ $# -gt 0 ]
+  then
+      docker build --build-arg COMPONENT=${component_name} -t airavata-${component_name} .
+      # docker push scigap/airavata-${component_name}
+fi
+
+docker build --build-arg COMPONENT=apiserver -t scigap/${environment}-airavata-apiserver .
+# docker push scigap/airavata-apiserver
+
+docker build --build-arg COMPONENT=gfac -t scigap/${environment}-airavata-gfac .
+# docker push scigap/airavata-gfac
+
+docker build --build-arg COMPONENT=orchestrator -t scigap/${environment}-airavata-orchestrator .
+# docker push scigap/airavata-orchestrator
+
+docker build --build-arg COMPONENT=credentialstore -t scigap/${environment}-airavata-credentialstore .
+# docker push scigap/airavata-credentialstore
+
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/airavata/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/airavata/Dockerfile b/deploy/images/airavata/Dockerfile
new file mode 100644
index 0000000..6b051ae
--- /dev/null
+++ b/deploy/images/airavata/Dockerfile
@@ -0,0 +1,16 @@
+#
+# To build from the airavata root dir until jenkins script or maven build to create docker image
+#
+FROM scigap/java:8
+ARG COMPONENT="all"
+
+ENV COMPONENT=$COMPONENT
+
+RUN mkdir /airavata
+COPY . /airavata
+
+WORKDIR /airavata
+
+RUN chmod +x ./bin/airavata-server-start.sh
+
+ENTRYPOINT ./bin/airavata-server-start.sh $COMPONENT

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/exhibitor/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/exhibitor/Dockerfile b/deploy/images/exhibitor/Dockerfile
new file mode 100644
index 0000000..feb7674
--- /dev/null
+++ b/deploy/images/exhibitor/Dockerfile
@@ -0,0 +1,6 @@
+FROM netflixoss/exhibitor:1.5.2
+
+ADD exhibitor.properties /exhibitor/exhibitor.properties
+
+ENTRYPOINT ["java", "-jar", "exhibitor-1.0-jar-with-dependencies.jar", "-c", "file", "--defaultconfig", "/exhibitor/exhibitor.properties"]
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/exhibitor/exhibitor.properties
----------------------------------------------------------------------
diff --git a/deploy/images/exhibitor/exhibitor.properties b/deploy/images/exhibitor/exhibitor.properties
new file mode 100644
index 0000000..b099e60
--- /dev/null
+++ b/deploy/images/exhibitor/exhibitor.properties
@@ -0,0 +1,14 @@
+java-environment=export JAVA_OPTS\="-Xms1000m -Xmx1000m"
+zookeeper-data-directory=/zookeeper/data
+cleanup-period-ms=200000
+zookeeper-install-directory=/zookeeper
+check-ms=2000
+client-port=2181
+cleanup-max-files=10
+connect-port=2888
+log4j-properties=
+observer-threshold=4
+election-port=3888
+zoo-cfg-extra=syncLimit\=5&tickTime\=2000&initLimit\=10
+auto-manage-instances-settling-period-ms=10000
+auto-manage-instances=1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/java/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/java/Dockerfile b/deploy/images/java/Dockerfile
new file mode 100644
index 0000000..e3a5dd6
--- /dev/null
+++ b/deploy/images/java/Dockerfile
@@ -0,0 +1,32 @@
+FROM debian:stable
+
+ENV JAVA_MAJOR 8
+ENV JAVA_MINOR 102
+ENV JAVA_BUILD 14
+
+ENV JAVA_HOME /opt/jdk
+ENV PATH ${PATH}:${JAVA_HOME}/bin
+
+RUN apt-get update \
+  && apt-get install --assume-yes curl ca-certificates \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/* /var/log/apt/*
+
+RUN curl -jksSLH "Cookie: oraclelicense=accept-securebackup-cookie" \
+  http://download.oracle.com/otn-pub/java/jdk/${JAVA_MAJOR}u${JAVA_MINOR}-b${JAVA_BUILD}/server-jre-${JAVA_MAJOR}u${JAVA_MINOR}-linux-x64.tar.gz \
+  | tar -zxvf - -C /opt && \
+  ln -s /opt/jdk1.${JAVA_MAJOR}.0_${JAVA_MINOR} /opt/jdk && \
+  rm -rf /opt/jdk/man/* \
+         /opt/jdk/jre/bin/jjs \
+         /opt/jdk/jre/bin/keytool \
+         /opt/jdk/jre/bin/orbd \
+         /opt/jdk/jre/bin/pack200 \
+         /opt/jdk/jre/bin/policytool \
+         /opt/jdk/jre/bin/rmid \
+         /opt/jdk/jre/bin/rmiregistry \
+         /opt/jdk/jre/bin/servertool \
+         /opt/jdk/jre/bin/tnameserv \
+         /opt/jdk/jre/bin/unpack200 \
+         /opt/jdk/jre/lib/ext/nashorn.jar \
+         /opt/jdk/jre/lib/oblique-fonts
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/kafka/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/kafka/Dockerfile b/deploy/images/kafka/Dockerfile
new file mode 100644
index 0000000..59770e1
--- /dev/null
+++ b/deploy/images/kafka/Dockerfile
@@ -0,0 +1,8 @@
+FROM scigap/java:8
+ENV ZOOKEEPER localhost:2181/kafka
+ENV LOG_DIRS /var/lib/kafka
+ENV JMX_PORT 9999
+ADD start-kafka.sh /start-kafka.sh
+RUN mkdir -p /opt/kafka && curl http://mirrors.sonic.net/apache/kafka/0.8.2.2/kafka_2.11-0.8.2.2.tgz > /tmp/kafka.tgz && \
+    tar -zxf /tmp/kafka.tgz  -C /opt/kafka --strip-components=1 && rm -f /tmp/kafka.tgz
+ENTRYPOINT /start-kafka.sh
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/kafka/start-kafka.sh
----------------------------------------------------------------------
diff --git a/deploy/images/kafka/start-kafka.sh b/deploy/images/kafka/start-kafka.sh
new file mode 100755
index 0000000..6026010
--- /dev/null
+++ b/deploy/images/kafka/start-kafka.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+if test -z "${BROKER_ID}"; then
+  BROKER_ID=$(ip a | grep 'eth0' | awk '/inet /{print substr($2,4)}'| sed 's/\///g' | head -n1 | tr -d .)
+fi
+
+mkdir -p /opt/kafka/etc
+cat <<EOF > /opt/kafka/etc/server.properties
+broker.id=${BROKER_ID}
+zookeeper.connect=${ZOOKEEPER}
+log.dirs=${LOG_DIRS}
+num.partitions=2
+default.replication.factor=1
+advertised.host.name=${ADVERTISED_HOST_NAME}
+EOF
+
+exec /opt/kafka/bin/kafka-server-start.sh /opt/kafka/etc/server.properties

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/logstash/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/logstash/Dockerfile b/deploy/images/logstash/Dockerfile
new file mode 100644
index 0000000..6fc9bd3
--- /dev/null
+++ b/deploy/images/logstash/Dockerfile
@@ -0,0 +1,5 @@
+FROM logstash:2.3.4
+
+RUN /opt/logstash/bin/plugin install logstash-output-amazon_es
+RUN /opt/logstash/bin/plugin install logstash-codec-avro logstash-codec-cloudtrail logstash-input-journald
+ENTRYPOINT []

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/logstash/elasticsearchpolicy
----------------------------------------------------------------------
diff --git a/deploy/images/logstash/elasticsearchpolicy b/deploy/images/logstash/elasticsearchpolicy
new file mode 100644
index 0000000..8852280
--- /dev/null
+++ b/deploy/images/logstash/elasticsearchpolicy
@@ -0,0 +1,27 @@
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "arn:aws:iam::691488976375:root"
+      },
+      "Action": "es:*",
+      "Resource": "arn:aws:es:us-east-1:691488976375:domain/scigap/*"
+    },
+    {
+      "Sid": "",
+      "Effect": "Allow",
+      "Principal": {
+        "AWS": "*"
+      },
+      "Action": "es:*",
+      "Resource": "arn:aws:es:us-east-1:691488976375:domain/scigap/*",
+      "Condition": {
+        "IpAddress": {
+          "aws:SourceIp": "50.200.229.250"
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/logstash/logstash-elastic.conf
----------------------------------------------------------------------
diff --git a/deploy/images/logstash/logstash-elastic.conf b/deploy/images/logstash/logstash-elastic.conf
new file mode 100644
index 0000000..2871657
--- /dev/null
+++ b/deploy/images/logstash/logstash-elastic.conf
@@ -0,0 +1,94 @@
+input {
+  kafka {
+    topic_id => "local_all_logs"
+    zk_connect => "127.0.0.1:2181"
+    auto_offset_reset => "smallest"
+    type => "all_logs"
+  }
+  kafka {
+    topic_id => "local_apiserver_logs"
+    zk_connect => "127.0.0.1:2181"
+    auto_offset_reset => "smallest"
+    type => "apiserver_logs"
+  }
+  kafka {
+    topic_id => "local_gfac_logs"
+    zk_connect => "127.0.0.1:2181"
+    auto_offset_reset => "smallest"
+    type => "gfac_logs"
+  }
+  kafka {
+    topic_id => "local_orchestrator_logs"
+    zk_connect => "127.0.0.1:2181"
+    auto_offset_reset => "smallest"
+    type => "orchestrator_logs"
+  }
+  kafka {
+    topic_id => "local_credentialstore_logs"
+    zk_connect => "127.0.0.1:2181"
+    auto_offset_reset => "smallest"
+    type => "credentialstore_logs"
+  }
+}
+
+filter {
+  mutate { add_field => { "[@metadata][level]" => "%{[level]}" } }
+  mutate { lowercase => ["[@metadata][level]"] }
+  mutate { gsub => ["level", "LOG_", ""] }
+  mutate {
+    add_tag => ["local", "CoreOS-899.13.0"]
+  }
+  ruby {
+    code => "
+    begin
+    t = Time.iso8601(event['timestamp'])
+    rescue ArgumentError => e
+    # drop the event if format is invalid
+    event.cancel
+    return
+    end
+    event['timestamp_usec'] = t.usec % 1000
+    event['timestamp'] = t.utc.strftime('%FT%T.%LZ')
+    "
+  }
+}
+
+output {
+  # SECURITY(review): elasticsearch credentials are hardcoded below and are
+  # now part of public git history — rotate them and load from the
+  # environment or a secret store instead of this file.
+  stdout { codec => rubydebug }
+  if [type] == "apiserver_logs" {
+    elasticsearch {
+      hosts => ["d5b696fac75ae2f1dda3c515ba904ff4.us-east-1.aws.found.io:9200"]
+      user => "admin"
+      password => "15tij9wc26p2qf3fgm"
+      index => "local-apiserver-logs-logstash-%{+YYYY.MM.dd}"
+    }
+  } else if [type] == "gfac_logs" {
+    elasticsearch {
+      hosts => ["d5b696fac75ae2f1dda3c515ba904ff4.us-east-1.aws.found.io:9200"]
+      user => "admin"
+      password => "15tij9wc26p2qf3fgm"
+      index => "local-gfac-logs-logstash-%{+YYYY.MM.dd}"
+    }
+  } else if [type] == "orchestrator_logs" {
+    elasticsearch {
+      hosts => ["d5b696fac75ae2f1dda3c515ba904ff4.us-east-1.aws.found.io:9200"]
+      user => "admin"
+      password => "15tij9wc26p2qf3fgm"
+      index => "local-orchestrator-logs-logstash-%{+YYYY.MM.dd}"
+    }
+  } else if [type] == "credentialstore_logs" {
+    elasticsearch {
+      hosts => ["d5b696fac75ae2f1dda3c515ba904ff4.us-east-1.aws.found.io:9200"]
+      user => "admin"
+      password => "15tij9wc26p2qf3fgm"
+      index => "local-credentialstore-logs-logstash-%{+YYYY.MM.dd}"
+    }
+  } else {
+  elasticsearch {
+    hosts => ["d5b696fac75ae2f1dda3c515ba904ff4.us-east-1.aws.found.io:9200"]
+    user => "admin"
+    password => "15tij9wc26p2qf3fgm"
+    index => "local-airavata-logs-logstash-%{+YYYY.MM.dd}"
+  }
+}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/logstash/logstash-output-amazon_es-0.3-java.gem
----------------------------------------------------------------------
diff --git a/deploy/images/logstash/logstash-output-amazon_es-0.3-java.gem b/deploy/images/logstash/logstash-output-amazon_es-0.3-java.gem
new file mode 100644
index 0000000..d3c913a
Binary files /dev/null and b/deploy/images/logstash/logstash-output-amazon_es-0.3-java.gem differ

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/logstash/logstash.conf
----------------------------------------------------------------------
diff --git a/deploy/images/logstash/logstash.conf b/deploy/images/logstash/logstash.conf
new file mode 100644
index 0000000..6dd3c41
--- /dev/null
+++ b/deploy/images/logstash/logstash.conf
@@ -0,0 +1,97 @@
+input {
+  kafka {
+    topic_id => "local_all_logs"
+    zk_connect => "107.20.88.15:2181/kafka"
+    auto_offset_reset => "smallest"
+    type => "all_logs"
+  }
+  kafka {
+    topic_id => "local_apiserver_logs"
+    zk_connect => "107.20.88.15:2181/kafka"
+    auto_offset_reset => "smallest"
+    type => "apiserver_logs"
+  }
+  kafka {
+    topic_id => "local_gfac_logs"
+    zk_connect => "107.20.88.15:2181/kafka"
+    auto_offset_reset => "smallest"
+    type => "gfac_logs"
+  }
+  kafka {
+    topic_id => "local_orchestrator_logs"
+    zk_connect => "107.20.88.15:2181/kafka"
+    auto_offset_reset => "smallest"
+    type => "orchestrator_logs"
+  }
+  kafka {
+    topic_id => "local_credentialstore_logs"
+    zk_connect => "107.20.88.15:2181/kafka"
+    auto_offset_reset => "smallest"
+    type => "credentialstore_logs"
+  }
+}
+
+filter {
+  mutate { add_field => { "[@metadata][level]" => "%{[level]}" } }
+  mutate { lowercase => ["[@metadata][level]"] }
+  mutate { gsub => ["level", "LOG_", ""] }
+  mutate {
+    add_tag => ["local", "CoreOS-899.13.0"]
+  }
+  ruby {
+    code => "
+    begin
+    t = Time.iso8601(event['timestamp'])
+    rescue ArgumentError => e
+    # drop the event if format is invalid
+    event.cancel
+    return
+    end
+    event['timestamp_usec'] = t.usec % 1000
+    event['timestamp'] = t.utc.strftime('%FT%T.%LZ')
+    "
+  }
+}
+
+output {
+  if [type] == "apiserver_logs" {
+    if [@metadata][level] == "debug" {
+      amazon_es {
+        hosts => ["search-scigap1-je4ln2j5dwlibskeuheh7nr2sa.us-east-1.es.amazonaws.com"]
+        region => "us-east-1"
+        index => "local-apiserver-logs-logstash-%{+YYYY.MM.dd}"
+      }
+    }
+  } else if [type] == "gfac_logs" {
+    if [@metadata][level] == "debug" {
+      amazon_es {
+        hosts => ["search-scigap1-je4ln2j5dwlibskeuheh7nr2sa.us-east-1.es.amazonaws.com"]
+        region => "us-east-1"
+        index => "local-gfac-logs-logstash-%{+YYYY.MM.dd}"
+      }
+    }
+  } else if [type] == "orchestrator_logs" {
+    if [@metadata][level] == "debug" {
+      amazon_es {
+        hosts => ["search-scigap1-je4ln2j5dwlibskeuheh7nr2sa.us-east-1.es.amazonaws.com"]
+        region => "us-east-1"
+        index => "local-orchestrator-logs-logstash-%{+YYYY.MM.dd}"
+            }
+    }
+  } else if [type] == "credentialstore_logs" {
+    if [@metadata][level] == "debug" {
+      amazon_es {
+        hosts => ["search-scigap1-je4ln2j5dwlibskeuheh7nr2sa.us-east-1.es.amazonaws.com"]
+        region => "us-east-1"
+        index => "local-credentialstore-logs-logstash-%{+YYYY.MM.dd}"
+      }
+    }
+  } else {
+  amazon_es {
+    hosts => ["search-scigap1-je4ln2j5dwlibskeuheh7nr2sa.us-east-1.es.amazonaws.com"]
+    region => "us-east-1"
+    index => "local-airavata-logs-logstash-%{+YYYY.MM.dd}"
+  }
+}
+}
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/rabbitmq/Dockerfile
----------------------------------------------------------------------
diff --git a/deploy/images/rabbitmq/Dockerfile b/deploy/images/rabbitmq/Dockerfile
new file mode 100644
index 0000000..8dae7d4
--- /dev/null
+++ b/deploy/images/rabbitmq/Dockerfile
@@ -0,0 +1,3 @@
+FROM rabbitmq:3-management
+
+COPY rabbitmq.config /etc/rabbitmq/
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/images/rabbitmq/rabbitmq.config
----------------------------------------------------------------------
diff --git a/deploy/images/rabbitmq/rabbitmq.config b/deploy/images/rabbitmq/rabbitmq.config
new file mode 100644
index 0000000..a3be93c
--- /dev/null
+++ b/deploy/images/rabbitmq/rabbitmq.config
@@ -0,0 +1,5 @@
+[
+  {rabbit, [
+    {tcp_listeners, [{"127.0.0.1", 5672}]}
+  ]}
+].

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/aws-metadata.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/aws-metadata.service b/deploy/systemd/aws-metadata.service
new file mode 100644
index 0000000..ac8ef57
--- /dev/null
+++ b/deploy/systemd/aws-metadata.service
@@ -0,0 +1,19 @@
+[Unit]
+Description=Loads AWS Metadata
+After=network-online.target
+Wants=network-online.target
+[Service]
+Type=oneshot
+RemainAfterExit=yes
+EnvironmentFile=/etc/environment
+ExecStart=-/usr/bin/bash -c "systemctl set-environment AWS_REGION=$(curl -s http://[ip]/latest/meta-data/placement/availability-zone | sed -e 's/[a-z]$//')"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment AWS_HOSTNAME=$(curl -s http://[ip]/latest/meta-data/local-hostname)"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment AWS_ZONE=$(curl -s http://[ip]/latest/meta-data/placement/availability-zone)"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment AWS_INSTANCE_TYPE=$(curl -s http://[ip]/latest/meta-data/instance-type)"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment COREOS_PRIVATE_IPV4=${COREOS_PRIVATE_IPV4}"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment COREOS_PUBLIC_IPV4=${COREOS_PUBLIC_IPV4}"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment COREOS_FREE_DISK=$(df -l | awk '{ s+=$4 } END {print s}')"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment COREOS_TOTAL_MEM=$(cat /proc/meminfo | grep MemTotal | awk '{print $2/1024+2048}')"
+ExecStart=-/usr/bin/bash -c "systemctl set-environment COREOS_TOTAL_CPUS=$(cat /proc/cpuinfo | grep processor | wc -l | awk  '{print $s*2}')"
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/consul-download.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/consul-download.service b/deploy/systemd/consul-download.service
new file mode 100644
index 0000000..c430bb8
--- /dev/null
+++ b/deploy/systemd/consul-download.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Downloads Consul
+After=network-online.target
+Wants=network-online.target
+ConditionPathExists=!/opt/consul/0.6.3/consul
+[Service]
+Type=oneshot
+Environment=CONSUL_VERSION=0.6.3
+ExecStartPre=/usr/bin/bash -c 'wget --progress=dot -e dotbytes=10M https://releases.hashicorp.com/consul/${CONSUL_VERSION}/consul_${CONSUL_VERSION}_linux_amd64.zip -O /tmp/consul.zip'
+ExecStartPre=/usr/bin/bash -c "mkdir -p /opt/bin /opt/consul/${CONSUL_VERSION}"
+ExecStart=/usr/bin/bash -c "/usr/bin/unzip -o /tmp/consul.zip -d /opt/consul/${CONSUL_VERSION} && ln -sf /opt/consul/${CONSUL_VERSION}/consul /opt/bin/consul"

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/consul.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/consul.service b/deploy/systemd/consul.service
new file mode 100644
index 0000000..4a887f6
--- /dev/null
+++ b/deploy/systemd/consul.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Consul Service Discovery and DNS
+After=consul-download.service aws-metadata.service
+Requires=consul-download.service aws-metadata.service
+[Service]
+EnvironmentFile=/etc/environment
+Restart=on-failure
+ExecStart=/opt/bin/consul agent -server -data-dir /var/lib/consul -bind ${COREOS_PRIVATE_IPV4} -dc ${AWS_REGION} -config-file /etc/consul/config.json -ui -client 0.0.0.0

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/kafka-manager.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/kafka-manager.service b/deploy/systemd/kafka-manager.service
new file mode 100644
index 0000000..1bed655
--- /dev/null
+++ b/deploy/systemd/kafka-manager.service
@@ -0,0 +1,15 @@
+[Unit]
+Description=kafka-manager
+After=docker.service
+Requires=docker.service
+[Service]
+EnvironmentFile=/etc/environment
+TimeoutStartSec=60
+Restart=on-failure
+ExecStartPre=-/usr/bin/docker rm -f kafka-manager
+ExecStartPre=-/usr/bin/docker pull sheepkiller/kafka-manager
+ExecStart=/usr/bin/docker run --name kafka-manager -p 9000:9000 -e ZK_HOSTS=localhost:2181 -e APPLICATION_SECRET=face2face sheepkiller/kafka-manager
+ExecStop=/usr/bin/docker stop kafka-manager
+[Install]
+WantedBy=multi-user.target
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/kafka.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/kafka.service b/deploy/systemd/kafka.service
new file mode 100644
index 0000000..cad29e5
--- /dev/null
+++ b/deploy/systemd/kafka.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Kafka
+Requires=docker.service
+[Service]
+EnvironmentFile=/etc/environment
+TimeoutStartSec=60
+Restart=on-failure
+ExecStartPre=-/usr/bin/docker rm -f kafka
+ExecStartPre=-/usr/bin/docker pull scigap/kafka
+ExecStart=/usr/bin/docker run --net=host --name kafka -e ADVERTISED_HOST_NAME=54.163.192.179 -v /var/lib/kafka:/var/lib/kafka scigap/kafka
+ExecStop=/usr/bin/docker stop kafka
+[Install]
+WantedBy=multi-user.target

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/logstash.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/logstash.service b/deploy/systemd/logstash.service
new file mode 100644
index 0000000..7ed4955
--- /dev/null
+++ b/deploy/systemd/logstash.service
@@ -0,0 +1,16 @@
+[Unit]
+Description=logstash
+Requires=docker.service
+After=docker.service
+[Service]
+EnvironmentFile=/etc/os-release
+LimitNOFILE=infinity
+TimeoutStartSec=90
+Restart=on-failure
+ExecStartPre=-/usr/bin/docker rm -f logstash
+ExecStartPre=-/usr/bin/docker pull scigap/logstash
+ExecStart=/usr/bin/docker run --name logstash -e ENV_NAME=local -e OS_NAME=${NAME} -e OS_VERSION=${VERSION} \
+    -e KAFKA_ZK="localhost:2181/kafka" -e ES_ENDPOINT="search-scigap-62tebdueebw5dfyn7bfyn63rru.us-east-1.es.amazonaws.com" \
+    -v /etc/logstash:/config -v /var/log/journal:/var/log/journal -v /var/lib/logstash:/var/lib/logstash \
+    scigap/logstash logstash -f /config/logstash.conf
+ExecStop=/usr/bin/docker stop logstash

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/deploy/systemd/zookeeper.service
----------------------------------------------------------------------
diff --git a/deploy/systemd/zookeeper.service b/deploy/systemd/zookeeper.service
new file mode 100644
index 0000000..38e631b
--- /dev/null
+++ b/deploy/systemd/zookeeper.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Exhibitor and Zookeeper
+After=docker.service
+Requires=docker.service
+[Service]
+EnvironmentFile=/etc/environment
+TimeoutStartSec=60
+Restart=on-failure
+ExecStartPre=-/usr/bin/docker rm -f zookeeper
+ExecStartPre=-/usr/bin/docker pull scigap/exhibitor
+ExecStart=/usr/bin/docker run --net=host --name zookeeper -v /var/lib/zookeeper/data:/zookeeper/data scigap/exhibitor --hostname ${COREOS_PRIVATE_IPV4} --port 8081
+ExecStop=/usr/bin/docker stop zookeeper
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/modules/commons/src/main/java/org/apache/airavata/common/logging/kafka/KafkaAppender.java
----------------------------------------------------------------------
diff --git a/modules/commons/src/main/java/org/apache/airavata/common/logging/kafka/KafkaAppender.java b/modules/commons/src/main/java/org/apache/airavata/common/logging/kafka/KafkaAppender.java
new file mode 100644
index 0000000..06649c6
--- /dev/null
+++ b/modules/commons/src/main/java/org/apache/airavata/common/logging/kafka/KafkaAppender.java
@@ -0,0 +1,122 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.airavata.common.logging.kafka;
+
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.classic.spi.IThrowableProxy;
+import ch.qos.logback.classic.spi.StackTraceElementProxy;
+import ch.qos.logback.core.UnsynchronizedAppenderBase;
+import com.google.gson.Gson;
+import org.apache.airavata.common.logging.Exception;
+import org.apache.airavata.common.logging.LogEntry;
+import org.apache.airavata.common.logging.ServerId;
+import org.apache.airavata.common.utils.AwsMetadata;
+import org.apache.airavata.common.utils.BuildConstant;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.Properties;
+
+public class KafkaAppender extends UnsynchronizedAppenderBase<ILoggingEvent> {
+    private final static Logger logger = LoggerFactory.getLogger(KafkaAppender.class);
+
+    private final Producer<String, String> producer;
+    private final String kafkaTopic;
+
+    private  ServerId serverId = null;
+
+    public KafkaAppender(String kafkaHost, String kafkaTopicPrefix) {
+        Properties props = new Properties();
+        props.put("bootstrap.servers", kafkaHost);
+        props.put("acks", "0");
+        props.put("retries", 0);
+        props.put("batch.size", 16384);
+        props.put("linger.ms", 10000); // Send the batch every 10 seconds
+        props.put("buffer.memory", 33554432);
+        props.put("producer.type", "async");
+        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        this.kafkaTopic = getKafkaTopic(kafkaTopicPrefix);
+        logger.info("Starting kafka producer: bootstrap-server:{}, topic : {}", kafkaHost, this.kafkaTopic);
+        this.producer = new KafkaProducer<>(props);
+        if(ServerSettings.isRunningOnAws()) {
+            final AwsMetadata awsMetadata = new AwsMetadata();
+            serverId = new ServerId(awsMetadata.getId(), awsMetadata.getHostname(),
+                    BuildConstant.VERSION, ServerSettings.getServerRoles());
+        } else {
+            serverId = new ServerId(ServerSettings.getIp(), ServerSettings.getIp(),
+                    BuildConstant.VERSION, ServerSettings.getServerRoles());
+        }
+    }
+
+    @Override
+    protected void append(ILoggingEvent event) {
+        event.prepareForDeferredProcessing();
+        //todo do more elegant streaming approach to publish logs
+        if (!event.getLevel().equals(Level.ALL) &&         // OFF AND ALL are not loggable levels
+                !event.getLevel().equals(Level.OFF)) {
+            final IThrowableProxy throwableProxy = event.getThrowableProxy();
+            final LogEntry entry = throwableProxy != null ?
+                    new LogEntry(serverId, event.getMessage(), Instant.ofEpochMilli(event.getTimeStamp()).toString(),
+                            event.getLevel().toString(), event.getLoggerName(), event.getMDCPropertyMap(),
+                            event.getThreadName() != null ? event.getThreadName() : null,
+                            new Exception(throwableProxy.getMessage(), toStringArray(throwableProxy.getStackTraceElementProxyArray())
+                            , throwableProxy.getClassName()))
+                    : new LogEntry(serverId, event.getMessage(), Instant.ofEpochMilli(event.getTimeStamp()).toString(),
+                    event.getLevel().toString(), event.getLoggerName(), event.getMDCPropertyMap(),
+                    event.getThreadName() != null ? event.getThreadName() : null);
+            producer.send(new ProducerRecord<>(kafkaTopic, new Gson().toJson(entry)));
+        }
+    }
+
+
+    private String[] toStringArray(StackTraceElementProxy[] stackTraceElement) {
+        return Arrays.stream(stackTraceElement).map(StackTraceElementProxy::getSTEAsString).toArray(String[]::new);
+    }
+
+    private String getKafkaTopic(String kafkaTopicPrefix) {
+        final StringBuilder stringBuffer = new StringBuilder("");
+        final String[] serverRoles = ServerSettings.getServerRoles();
+        if (serverRoles.length == 4) {
+            return kafkaTopicPrefix + "_all";
+        }
+        for (String role : ServerSettings.getServerRoles()) {
+            stringBuffer.append("_");
+            stringBuffer.append(role);
+            stringBuffer.append("_logs");
+            // do not support multiple roles yet, topic name will become complex
+            break;
+        }
+        return kafkaTopicPrefix + stringBuffer.toString();
+    }
+
+    public void close() {
+        producer.close();
+    }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/6ec88cfd/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f3f0e8d..181baa1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -571,8 +571,103 @@
 				<!--<module>modules/workflow</module>-->
 				<!--<module>modules/xbaya-gui</module>-->
                 		<module>modules/distribution</module>
-            </modules>
+            		</modules>
 		</profile>
+        <profile>
+            <id>jenkins</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-remote-resources-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <goals>
+                                    <goal>process</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <artifactId>maven-resources-plugin</artifactId>
+                        <version>2.5</version>
+                        <executions>
+                            <execution>
+                                <id>copy-resources</id>
+                                <!-- here the phase you need -->
+                                <phase>validate</phase>
+                                <goals>
+                                    <goal>copy-resources</goal>
+                                </goals>
+                                <configuration>
+                                    <outputDirectory>${basedir}/target/classes/META-INF</outputDirectory>
+                                    <resources>
+                                        <resource>
+                                            <directory>${basedir}/src/main/assembly/dist</directory>
+                                            <filtering>true</filtering>
+                                        </resource>
+                                    </resources>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-compiler-plugin</artifactId>
+                        <version>3.1</version>
+                        <configuration>
+                            <source>1.8</source>
+                            <target>1.8</target>
+                        </configuration>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <version>${surefire.version}</version>
+                        <configuration>
+                            <failIfNoTests>false</failIfNoTests>
+                            <skipTests>${skipTests}</skipTests>
+                            <workingDirectory>${project.build.testOutputDirectory}</workingDirectory>
+                            <!-- making sure that the sure-fire plugin doesn't run the integration
+                                tests -->
+                            <!-- Integration tests are run using the fail-safe plugin in the module
+                                pom -->
+                            <excludes>
+                                <exclude>**/IT.java</exclude>
+                                <exclude>**/*TestWithMyProxyAuth.java</exclude>
+                                <exclude>**/*TestWithSSHAuth.java</exclude>
+                                <exclude>**/*TestWithEC2Auth.java</exclude>
+                            </excludes>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <modules>
+                <module>modules/configuration</module>
+                <module>airavata-api</module>
+                <module>modules/commons</module>
+                <module>modules/messaging</module>
+                <module>modules/gfac</module>
+                <module>modules/registry</module>
+                <module>modules/security</module>
+                <module>modules/credential-store</module>
+                <module>modules/orchestrator</module>
+                <module>modules/monitoring</module>
+                <module>modules/user-profile</module>
+                <!--<module>modules/cloud</module>-->
+                <module>modules/server</module>
+                <module>modules/workflow</module>
+                <module>modules/test-suite</module>
+                <!-- Deprecated Modules-->
+                <!--<module>modules/integration-tests</module>-->
+                <!--<module>modules/workflow-model</module>-->
+                <!--<module>modules/workflow</module>-->
+                <!--<module>modules/xbaya-gui</module>-->
+            </modules>
+        </profile>
 		<profile>
 			<id>pedantic</id>
 			<build>