Posted to commits@ambari.apache.org by jo...@apache.org on 2017/07/18 15:02:49 UTC

[13/50] [abbrv] ambari git commit: AMBARI-21462. Readd TITAN, R4ML, SYSTEMML, JNBG to BigInsights and fix HBase backup during EU and imports (alejandro)

AMBARI-21462. Readd TITAN, R4ML, SYSTEMML, JNBG to BigInsights and fix HBase backup during EU and imports (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/69e492f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/69e492f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/69e492f2

Branch: refs/heads/branch-feature-AMBARI-21450
Commit: 69e492f288340e797cce62bfd42e677bec958158
Parents: 1f54c6e
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Jul 12 15:14:30 2017 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Jul 12 16:17:07 2017 -0700

----------------------------------------------------------------------
 .../0.96.0.2.0/package/scripts/hbase_master.py  |  10 +-
 .../0.96.0.2.0/package/scripts/hbase_service.py |  37 ++--
 .../common-services/JNBG/0.2.0/alerts.json      |  32 +++
 .../JNBG/0.2.0/configuration/jnbg-env.xml       | 208 +++++++++++++++++++
 .../common-services/JNBG/0.2.0/kerberos.json    |  59 ++++++
 .../common-services/JNBG/0.2.0/metainfo.xml     | 108 ++++++++++
 .../JNBG/0.2.0/package/files/jkg_install.sh     | 169 +++++++++++++++
 .../JNBG/0.2.0/package/files/jkg_start.sh       |  84 ++++++++
 .../JNBG/0.2.0/package/files/log4j_setup.sh     |  79 +++++++
 .../0.2.0/package/files/pyspark_configure.sh    | 104 ++++++++++
 .../JNBG/0.2.0/package/files/pythonenv_setup.sh | 138 ++++++++++++
 .../JNBG/0.2.0/package/files/toree_configure.sh | 151 ++++++++++++++
 .../JNBG/0.2.0/package/files/toree_install.sh   | 176 ++++++++++++++++
 .../JNBG/0.2.0/package/scripts/jkg_toree.py     | 134 ++++++++++++
 .../0.2.0/package/scripts/jkg_toree_params.py   | 177 ++++++++++++++++
 .../JNBG/0.2.0/package/scripts/jnbg_helpers.py  |  81 ++++++++
 .../JNBG/0.2.0/package/scripts/jnbg_params.py   |  66 ++++++
 .../JNBG/0.2.0/package/scripts/py_client.py     |  63 ++++++
 .../0.2.0/package/scripts/py_client_params.py   |  39 ++++
 .../JNBG/0.2.0/package/scripts/service_check.py |  44 ++++
 .../JNBG/0.2.0/package/scripts/status_params.py |  26 +++
 .../R4ML/0.8.0/configuration/r4ml-env.xml       |  48 +++++
 .../common-services/R4ML/0.8.0/metainfo.xml     |  92 ++++++++
 .../R4ML/0.8.0/package/files/Install.R          |  25 +++
 .../R4ML/0.8.0/package/files/ServiceCheck.R     |  28 +++
 .../R4ML/0.8.0/package/files/localr.repo        |  22 ++
 .../R4ML/0.8.0/package/scripts/__init__.py      |  19 ++
 .../R4ML/0.8.0/package/scripts/params.py        |  80 +++++++
 .../R4ML/0.8.0/package/scripts/r4ml_client.py   | 201 ++++++++++++++++++
 .../R4ML/0.8.0/package/scripts/service_check.py |  45 ++++
 .../SYSTEMML/0.10.0/metainfo.xml                |  77 +++++++
 .../SYSTEMML/0.10.0/package/scripts/__init__.py |  19 ++
 .../SYSTEMML/0.10.0/package/scripts/params.py   |  40 ++++
 .../0.10.0/package/scripts/service_check.py     |  43 ++++
 .../0.10.0/package/scripts/systemml_client.py   |  49 +++++
 .../common-services/TITAN/1.0.0/alerts.json     |  33 +++
 .../1.0.0/configuration/gremlin-server.xml      |  85 ++++++++
 .../TITAN/1.0.0/configuration/hadoop-gryo.xml   |  94 +++++++++
 .../1.0.0/configuration/hadoop-hbase-read.xml   | 102 +++++++++
 .../TITAN/1.0.0/configuration/titan-env.xml     | 157 ++++++++++++++
 .../1.0.0/configuration/titan-hbase-solr.xml    |  69 ++++++
 .../TITAN/1.0.0/configuration/titan-log4j.xml   |  65 ++++++
 .../common-services/TITAN/1.0.0/kerberos.json   |  52 +++++
 .../common-services/TITAN/1.0.0/metainfo.xml    | 124 +++++++++++
 .../package/alerts/alert_check_titan_server.py  |  65 ++++++
 .../package/files/gremlin-server-script.sh      |  86 ++++++++
 .../package/files/tinkergraph-empty.properties  |  18 ++
 .../TITAN/1.0.0/package/files/titanSmoke.groovy |  20 ++
 .../TITAN/1.0.0/package/scripts/params.py       | 202 ++++++++++++++++++
 .../1.0.0/package/scripts/params_server.py      |  37 ++++
 .../1.0.0/package/scripts/service_check.py      |  88 ++++++++
 .../TITAN/1.0.0/package/scripts/titan.py        | 143 +++++++++++++
 .../TITAN/1.0.0/package/scripts/titan_client.py |  61 ++++++
 .../TITAN/1.0.0/package/scripts/titan_server.py |  67 ++++++
 .../1.0.0/package/scripts/titan_service.py      | 150 +++++++++++++
 .../templates/titan_solr_client_jaas.conf.j2    |  23 ++
 .../package/templates/titan_solr_jaas.conf.j2   |  26 +++
 .../BigInsights/4.2.5/role_command_order.json   |  12 +-
 .../4.2.5/services/JNBG/metainfo.xml            |  26 +++
 .../4.2.5/services/R4ML/metainfo.xml            |  37 ++++
 .../4.2.5/services/SYSTEMML/metainfo.xml        |  37 ++++
 .../4.2.5/services/TITAN/metainfo.xml           |  40 ++++
 .../BigInsights/4.2.5/services/stack_advisor.py |  53 +++++
 .../upgrades/nonrolling-upgrade-to-hdp-2.6.xml  |   2 +-
 .../BigInsights/4.2/role_command_order.json     |   3 +-
 .../4.2/services/SYSTEMML/metainfo.xml          |  77 +++++++
 .../SYSTEMML/package/scripts/__init__.py        |  19 ++
 .../services/SYSTEMML/package/scripts/params.py |  40 ++++
 .../SYSTEMML/package/scripts/service_check.py   |  43 ++++
 .../SYSTEMML/package/scripts/systemml_client.py |  49 +++++
 .../services/TITAN/configuration/titan-env.xml  |  46 ++++
 .../TITAN/configuration/titan-hbase-solr.xml    |  66 ++++++
 .../TITAN/configuration/titan-log4j.xml         |  65 ++++++
 .../4.2/services/TITAN/kerberos.json            |  17 ++
 .../BigInsights/4.2/services/TITAN/metainfo.xml |  88 ++++++++
 .../TITAN/package/files/titanSmoke.groovy       |  20 ++
 .../services/TITAN/package/scripts/params.py    | 128 ++++++++++++
 .../TITAN/package/scripts/service_check.py      |  64 ++++++
 .../4.2/services/TITAN/package/scripts/titan.py |  70 +++++++
 .../TITAN/package/scripts/titan_client.py       |  58 ++++++
 .../upgrades/nonrolling-upgrade-to-hdp-2.6.xml  |   2 +-
 81 files changed, 5583 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
index 30674a8..8151572 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
@@ -19,10 +19,14 @@ limitations under the License.
 """
 
 import sys
-from resource_management import *
+
+from resource_management.libraries.script.script import Script
+from resource_management.core.resources.service import Service
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.format import format
 from hbase import hbase
 from hbase_service import hbase_service
 from hbase_decommission import hbase_decommission
@@ -31,6 +35,8 @@ from setup_ranger_hbase import setup_ranger_hbase
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 
+if OSCheck.is_windows_family():
+  from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
 
 class HbaseMaster(Script):
   def configure(self, env):
@@ -83,7 +89,7 @@ class HbaseMasterDefault(HbaseMaster):
     env.set_params(params)
     self.configure(env) # for security
     setup_ranger_hbase(upgrade_type=upgrade_type, service_name="hbase-master")
-    hbase_service('master', action = 'start')
+    hbase_service('master', action='start')
     
   def stop(self, env, upgrade_type=None):
     import params
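
The explicit imports above replace the former wildcard import from resource_management; check_process_status is the helper these scripts typically call from the status() command. A minimal sketch of that usage, assuming a status_params module exposing an hbase_master_pid_file attribute (the attribute name is an assumption; the status() body is not part of this hunk):

    def status(self, env):
        import status_params
        env.set_params(status_params)
        # Raises ComponentIsNotRunning when the pid file is missing or the process is gone
        check_process_status(status_params.hbase_master_pid_file)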

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
index 3b8e494..1d618ed 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_service.py
@@ -17,14 +17,17 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from datetime import datetime
 
-from resource_management import *
+from resource_management.core.resources.system import Execute
+from resource_management.core.resources.system import File
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.format import format
+from resource_management.core.shell import as_sudo
+from resource_management.libraries.functions.show_logs import show_logs
 from resource_management.core.logger import Logger
 
-def hbase_service(
-  name,
-  action = 'start'): # 'start' or 'stop' or 'status'
-    
+def hbase_service(name, action='start'):
     import params
   
     role = name
@@ -36,18 +39,28 @@ def hbase_service(
     # delete wal log if HBase version has moved down
     if params.to_backup_wal_dir:
       wal_directory = params.wal_directory
-      timestamp = datetime.datetime.now()
+      timestamp = datetime.now()
       timestamp_format = '%Y%m%d%H%M%S'
       wal_directory_backup = '%s_%s' % (wal_directory, timestamp.strftime(timestamp_format))
 
-      rm_cmd = format("hadoop fs -mv {wal_directory} {wal_directory_backup}")
+      check_if_wal_dir_exists = format("hdfs dfs -ls {wal_directory}")
+      wal_dir_exists = False
       try:
-        Execute ( rm_cmd,
-          user = params.hbase_user
-        )
+        Execute(check_if_wal_dir_exists,
+                user=params.hbase_user
+                )
+        wal_dir_exists = True
       except Exception, e:
-        #Should still allow HBase Start/Stop to proceed
-        Logger.error("Failed to backup HBase WAL directory, command: {0} . Exception: {1}".format(rm_cmd, e.message))
+        Logger.error(format("Did not find HBase WAL directory {wal_directory}. It's possible that it was already moved. Exception: {e.message}"))
+
+      if wal_dir_exists:
+        move_wal_dir_cmd = format("hdfs dfs -mv {wal_directory} {wal_directory_backup}")
+        try:
+          Execute(move_wal_dir_cmd,
+            user=params.hbase_user
+          )
+        except Exception, e:
+          Logger.error(format("Failed to backup HBase WAL directory, command: {move_wal_dir_cmd} . Exception: {e.message}"))
 
     if action == 'start':
       daemon_cmd = format("{cmd} start {role}")
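
For readability, a minimal standalone sketch of the backup flow introduced above (probe for the WAL directory first, then move it), written outside Ambari's Execute wrapper; wal_directory and hbase_user mirror the params referenced in the hunk, and sudo -u stands in for Execute's user= argument:

    from datetime import datetime
    import subprocess

    def backup_wal_dir(wal_directory, hbase_user):
        timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
        wal_directory_backup = '%s_%s' % (wal_directory, timestamp)
        # Only attempt the move when the directory is actually present in HDFS
        ls_cmd = ['sudo', '-u', hbase_user, 'hdfs', 'dfs', '-ls', wal_directory]
        if subprocess.call(ls_cmd) == 0:
            mv_cmd = ['sudo', '-u', hbase_user, 'hdfs', 'dfs', '-mv',
                      wal_directory, wal_directory_backup]
            subprocess.call(mv_cmd)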

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/alerts.json b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/alerts.json
new file mode 100755
index 0000000..963c687
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/alerts.json
@@ -0,0 +1,32 @@
+{
+  "JNBG": {
+    "service": [],
+    "KERNEL_GATEWAY": [
+      {
+        "name": "jupyter_kernel_gateway",
+        "label": "Jupyter Kernel Gateway Process",
+        "description": "This host-level alert is triggered if the Jupyter Kernel Gateway cannot be determined to be up.",
+        "interval": 1,
+        "scope": "HOST",
+        "source": {
+          "type": "PORT",
+          "uri": "{{jnbg-env/jnbg_port}}",
+          "default_port": 8888,
+          "reporting": {
+            "ok": {
+              "text": "TCP OK - {0:.3f}s response on port {1}"
+            },
+            "warning": {
+              "text": "TCP OK - {0:.3f}s response on port {1}",
+              "value": 1.5
+            },
+            "critical": {
+              "text": "Connection failed: {0} to {1}:{2}",
+              "value": 5.0
+            }
+          }
+        }
+      }
+    ]
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/configuration/jnbg-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/configuration/jnbg-env.xml b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/configuration/jnbg-env.xml
new file mode 100755
index 0000000..f9da01e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/configuration/jnbg-env.xml
@@ -0,0 +1,208 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="false">
+  <property>
+    <name>kernel_spark_opts</name>
+    <value>--master=yarn --deploy-mode=client --driver-java-options="-Dlog4j.logFile=/var/log/jnbg/spark-driver-USER.log -Dlog4j.configuration=file:/var/lib/jnbg/conf/log4j.properties"</value>
+    <display-name>spark_opts</display-name>
+    <description>
+      SPARK_OPTS used for all kernels (ToreeInstall.spark_opts, PYSPARK_SUBMIT_ARGS).
+      Optionally include -Dlog4j.logLevel and -Dlog4j.fileSize in --driver-java-options
+      to influence logging behavior. Default: -Dlog4j.logLevel=INFO -Dlog4j.fileSize=10MB
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>toree_opts</name>
+    <value></value>
+    <display-name>ToreeInstall.toree_opts</display-name>
+    <description>__TOREE_OPTS__ for Apache Toree kernel</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>notebook_user</name>
+    <value>notebook</value>
+    <display-name>Notebook service user</display-name>
+    <description>User to run JKG and kernel processes</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>notebook_group</name>
+    <value>notebook</value>
+    <display-name>Notebook service user group</display-name>
+    <description>Service user's group</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_home</name>
+    <value>/usr/iop/current/spark2-client</value>
+    <display-name>spark_home</display-name>
+    <description>SPARK_HOME for kernels</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_sql_warehouse_dir</name>
+    <value>/apps/jnbg/spark-warehouse</value>
+    <display-name>spark.sql.warehouse.dir</display-name>
+    <description>Warehouse for Notebook applications</description>
+  </property>
+  <property>
+    <name>jkg_port</name>
+    <value>8888</value>
+    <display-name>KernelGatewayApp.port</display-name>
+    <description>Jupyter Kernel Gateway port</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>jkg_loglevel</name>
+    <value>INFO</value>
+    <display-name>Application.log_level</display-name>
+    <description>Jupyter Kernel Gateway Log level</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>jkg_pid_dir_prefix</name>
+    <value>/var/run/jnbg</value>
+    <display-name>JNBG pid directory prefix</display-name>
+    <description>JNBG pid directory prefix for storing process ID</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>jkg_log_dir</name>
+    <value>/var/log/jnbg</value>
+    <display-name>Kernel Gateway log directory</display-name>
+    <description>Jupyter Kernel Gateway logfile directory</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>max_kernels</name>
+    <value>15</value>
+    <display-name>Maximum number of kernel instances</display-name>
+    <description>
+      Limits the number of kernel instances allowed to run by this gateway.
+      Unbounded by default.
+
+      Note: Number of kernel instances is also affected by the Spark2 property spark.port.maxRetries. Increase spark.port.maxRetries from its default value to a much higher value to enable controlling the number of active kernel instances using max_kernels.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>cull_idle_kernel_period</name>
+    <value>43200</value>
+    <display-name>Idle kernel culling period</display-name>
+    <description>Period in seconds kernel can idle before being culled</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>cull_idle_kernel_interval</name>
+    <value>300</value>
+    <display-name>Idle kernel culling interval</display-name>
+    <description>Check for idle kernels to cull every specified number of seconds</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>python_interpreter_path</name>
+    <value>/usr/bin/python</value>
+    <display-name>Python interpreter path</display-name>
+    <description>
+      PYTHON_EXE for virtualenv
+      Python interpreter must be version 2.7.x
+    </description>
+    <value-attributes>
+      <type>file</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>python_virtualenv_path_prefix</name>
+    <value>/var/lib/jnbg</value>
+    <display-name>Python virtualenv path prefix</display-name>
+    <description>
+      Python virtualenv path prefix
+      $VIRTUAL_ENV=python_virtualenv_path_prefix/python2.7
+    </description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>python_virtualenv_restrictive</name>
+    <value>true</value>
+    <display-name>Python virtualenv restrictive mode</display-name>
+    <description> 
+      Python virtualenv restrictive mode.
+      Check for restrictive mode so that service users cannot modify it.
+      Uncheck so that service users can install packages with "pip install ..."
+    </description>
+    <final>true</final>
+    <value-attributes>
+      <type>boolean</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>pythonpath</name>
+    <value>/usr/iop/current/spark2-client/python:/usr/iop/current/spark2-client/python/lib/pyspark.zip:/usr/iop/current/spark2-client/python/lib/py4j-0.10.4-src.zip</value>
+    <display-name>PYTHONPATH</display-name>
+    <description>PYTHONPATH for PySpark kernel</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_conf_dir</name>
+    <value>/var/lib/jnbg/conf</value>
+    <display-name>SPARK_CONF_DIR</display-name>
+    <description>Spark configuration directory, currently only contains log4j.properties (see "-Dlog4j.configuration=file:/var/lib/jnbg/conf/log4j.properties" in spark_opts)</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/kerberos.json
new file mode 100755
index 0000000..8777709
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/kerberos.json
@@ -0,0 +1,59 @@
+{
+  "services": [
+    {
+      "name": "JNBG",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "components": [
+        {
+          "name": "KERNEL_GATEWAY",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "jnbg_principal",
+              "principal": {
+                "value": "${jnbg-env/notebook_user}/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "jnbg-env/jnbg.service.kerberos.principal",
+                "local_username" : "${jnbg-env/notebook_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/jnbg.service.keytab",
+                "owner": {
+                  "name": "${jnbg-env/notebook_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "jnbg-env/jnbg.service.kerberos.keytab"
+              }
+            }
+          ]
+        },
+        {
+          "name": "PYTHON_CLIENT",
+          "identities": [
+            {
+              "name": "/JNBG/KERNEL_GATEWAY/jnbg_principal"
+            }
+          ]
+        }
+      ],
+
+      "configurations": [
+        {
+          "jnbg-env": {
+            "jnbg.kerberos.enabled": "true"
+          }
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/metainfo.xml
new file mode 100755
index 0000000..5afe904
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/metainfo.xml
@@ -0,0 +1,108 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>JNBG</name>
+      <displayName>JNBG</displayName>
+      <comment>Jupyter Notebook Kernel Gateway with Apache Toree</comment>
+      <version>0.2.0</version>
+      <components>
+        <component>
+          <name>KERNEL_GATEWAY</name>
+          <displayName>Jupyter Kernel Gateway</displayName>
+          <category>MASTER</category>
+          <cardinality>1+</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <commandScript>
+            <script>scripts/jkg_toree.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>3000</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+              <name>JNBG/PYTHON_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>SPARK/SPARK2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+        <component>
+          <name>PYTHON_CLIENT</name>
+          <displayName>Python Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>1+</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <commandScript>
+            <script>scripts/py_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>3000</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+              <name>SPARK/SPARK2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+      </components>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>SPARK2</service>
+      </requiredServices>
+
+      <configuration-dependencies>
+        <config-type>jnbg-env</config-type>
+      </configuration-dependencies>
+      <restartRequiredAfterChange>true</restartRequiredAfterChange>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_install.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_install.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_install.sh
new file mode 100755
index 0000000..2027c9f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_install.sh
@@ -0,0 +1,169 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+PY_EXEC=$1
+PY_VENV_PATH_PREFIX=$2
+PY_VENV_OWNER=$3
+KINIT_CMD=$4
+
+checkPipInstall()
+{
+  pip show $1 2>&1 > /dev/null
+}
+
+checkSuccess()
+{
+  if [ $? != 0 ]; then
+    set +x
+    echo "Error encountered at line $1 while attempting to: "
+    if [ -n "$2" ]; then
+      echo $2
+    fi
+    echo Exiting.
+    exit 1
+  fi
+  set -x
+}
+
+# /etc/pip.conf overrides all
+if [ -f /etc/pip.conf ]; then
+  PYPI_URL=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}')
+  PYPI_HOST=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}' | sed -e 's/^.*\/\///' | sed -e 's/:.*$//')
+  PYPI_PORT=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}'  | sed -e 's/^.*:*://' | sed -e 's/\/.*$//')
+else
+  # If no pip.conf then try to determine based on repo URLs in use
+  if [ -f /etc/yum.repos.d/IOP.repo ]; then
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w http
+    httpurl=$?
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w https
+    httpsurl=$?
+    if [ "$httpurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=http:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    elif [ "$httpsurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=https:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    fi
+  else
+    # fallback default
+    PYPI_HOST=ibm-open-platform.ibm.com
+    PYPI_PORT=8080
+    PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+  fi
+fi
+
+if [[ -z "${PYPI_URL}" || -z "${PYPI_HOST}" || -z "${PYPI_PORT}" ]];then
+  PYPI_HOST=ibm-open-platform.ibm.com
+  PYPI_PORT=8080
+  PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+fi
+
+PLATFORM=`uname -p`
+rhver=7
+
+if [ "$PY_EXEC" = "/opt/rh/python27/root/usr/bin/python" ]; then
+  rhscl=1
+else
+  rhscl=0
+fi
+
+if [ "$PLATFORM" == "x86_64" ]
+then
+  if [ -x /usr/bin/lsb_release ]; then
+    rhver=$(/usr/bin/lsb_release -rs | cut -f1 -d.)
+  fi
+
+  if [ "$rhver" -eq 6 ];then
+    if [ "$rhscl" -eq 1 ] && [ ! -f /opt/rh/python27/enable ]; then
+      echo "Installation failed; Install Python 2.7 using Red Hat Software Collections and retry."
+      exit 1
+    elif [ "$rhscl" -eq 1 ]; then
+      source /opt/rh/python27/enable
+      # uninstall older pip version that accompanies SCL
+      pip uninstall -y pip
+    fi
+  fi
+fi
+
+pyver=`echo $(${PY_EXEC} -V 2>&1 | awk '{ print $2 }') | sed -e 's/\.//g'`
+if [ "$pyver" -lt 270 ]; then
+  echo "Installation failed; Ensure that the specified python_interpreter_path is Python version 2.7."
+  exit 1
+fi
+
+easy_install pip
+checkSuccess $LINENO "-  easy_install pip"
+pip -V
+
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple virtualenv --upgrade
+checkPipInstall virtualenv
+checkSuccess $LINENO "-  pip install virtualenv"
+
+if [ -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  # Warning only to tolerate pre-existing virtual env. from failed installs
+  echo "Installation warning: ${PY_VENV_PATH_PREFIX}/python2.7 exists."
+  echo "This might indicate remnants from a prior or failed installation."
+  echo "Check specified property value for python_virtualenv_path_prefix."
+fi
+
+if [ ! -x "${PY_EXEC}" ]; then
+  echo "Installation failed: ${PY_EXEC} does not appear to be a valid python executable; Use a different python_interpreter_path."
+  exit 1
+fi
+
+virtualenv -p ${PY_EXEC} ${PY_VENV_PATH_PREFIX}/python2.7
+checkSuccess $LINENO "-  create virtualenv using ${PY_EXEC}"
+
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+pip -V
+
+if [ "$rhver" -eq 6 ]; then
+  if [ "$rhscl" -eq 1 ]; then
+    pip -V
+    # uninstall older pip version that accompanies virtualenv with SCL
+    pip uninstall -y pip
+    easy_install pip
+    checkPipInstall pip
+    checkSuccess $LINENO "- easy_install pip"
+  fi
+fi
+
+pip -V
+# Use --index-url and not --extra-index-url as we are trying to install
+# specific package versions
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple setuptools --upgrade
+checkPipInstall setuptools
+checkSuccess $LINENO "- pip install setuptools"
+
+# Using --upgrade enables updating missing dependencies after failed installs
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple/ jupyter_kernel_gateway --upgrade
+checkPipInstall jupyter_kernel_gateway
+checkSuccess $LINENO "- pip install jupyter_kernel_gateway"
+
+# Set ownership of the created virtualenv if configured via python_virtualenv_restrictive
+if [ "${PY_VENV_OWNER}" != "root" ]; then
+  echo ====== Virtualenv owner = $PY_VENV_OWNER =========
+  chown -R ${PY_VENV_OWNER}: ${PY_VENV_PATH_PREFIX}/python2.7
+fi
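
The PYPI_* detection at the top of this script honors /etc/pip.conf first. For reference, a hypothetical pip.conf laid out the way the grep/awk parsing expects (an "extra-index-url = <url>" line, i.e. three whitespace-separated fields); the host and port are placeholders, not values from this commit:

    [global]
    extra-index-url = http://pypi.example.com:8080/simple/
    trusted-host = pypi.example.com

The same detection block is repeated verbatim in pythonenv_setup.sh, toree_configure.sh and toree_install.sh below.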

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_start.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_start.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_start.sh
new file mode 100755
index 0000000..fdc9e59
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/jkg_start.sh
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+START_CMD=$1
+SPARK_HOME=$2
+PY_EXEC=$3
+PY_VENV_PATH_PREFIX=$4
+KINIT_CMD=$5
+LOG=$6
+PIDFILE=$7
+
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+
+PLATFORM=`uname -p`
+rhver=7
+
+if [ "$PY_EXEC" = "/opt/rh/python27/root/usr/bin/python" ]; then
+  rhscl=1
+else
+  rhscl=0
+fi
+
+if [ "$PLATFORM" == "x86_64" ]
+then
+  if [ -x /usr/bin/lsb_release ]; then
+    rhver=$(/usr/bin/lsb_release -rs | cut -f1 -d.)
+  fi
+
+  if [ "$rhver" -eq 6 ];then
+    if [ "$rhscl" -eq 1 ] && [ ! -f /opt/rh/python27/enable ]; then
+      echo "Detected invalid installation state: Install Python 2.7 using Red Hat Software Collections and try reinstalling the service."
+      exit 1
+    elif [ "$rhscl" -eq 1 ]; then
+      source /opt/rh/python27/enable
+      # uninstall older pip version that accompanies SCL
+      pip uninstall -y pip
+    fi
+  fi
+fi
+
+pyver=`echo $(${PY_EXEC} -V 2>&1 | awk '{ print $2 }') | sed -e 's/\.//g'`
+if [ "$pyver" -lt 270 ]; then
+  echo "Detected invalid installation state: Ensure that the specified python_interpreter_path is Python version 2.7."
+  exit 1
+fi
+
+if [ ! -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  echo "Did not find necessary virtual environment to execute service startup. This state in unexpected and inconsistent when the service is in the INSTALLED state. Delete the service and reinstall."
+  exit 1
+fi
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+
+# Required for supporting Python 2 kernel
+export PYTHONPATH=${SPARK_HOME}/python/lib/pyspark.zip:${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.10.4-src.zip
+
+export SPARK_CONF_DIR=$SPARK_HOME/conf
+source $SPARK_CONF_DIR/spark-env.sh
+set +x
+eval "$START_CMD >> $LOG 2>&1 &"
+if [ $? -eq 0 ]; then
+  echo $! > $PIDFILE
+  exit 0
+fi
+exit 1
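
jkg_start.sh is a thin wrapper driven entirely by its seven positional arguments. A hypothetical invocation for a non-kerberized cluster, with values that are assumptions based on the jnbg-env.xml defaults above (the real START_CMD is assembled in jkg_toree_params.py, which is not shown in this excerpt):

    ./jkg_start.sh \
      "jupyter kernelgateway --ip=0.0.0.0 --KernelGatewayApp.port=8888" \
      /usr/iop/current/spark2-client \
      /usr/bin/python \
      /var/lib/jnbg \
      "" \
      /var/log/jnbg/jupyter_kernel_gateway.log \
      /var/run/jnbg/jupyter_kernel_gateway.pid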

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/log4j_setup.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/log4j_setup.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/log4j_setup.sh
new file mode 100755
index 0000000..921045d
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/log4j_setup.sh
@@ -0,0 +1,79 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+SPARK_CONFIG_DIR=$1
+
+log4j_properties_file="${SPARK_CONFIG_DIR}/log4j.properties"
+
+cat <<'EOF' > "${log4j_properties_file}"
+
+# default log file, overridden by Java System property -Dlog4j.logFile=...
+log4j.logFile=/var/log/jnbg/spark-driver-${user.name}.log
+
+# default (root) log level, overridable by Java System property -Dlog4j.logLevel=...
+log4j.logLevel=INFO
+
+# default log file size limit, overridable by Java System property -Dlog4j.fileSize=... (KB, MB, GB)
+log4j.fileSize=10MB
+
+# default max number of log file backups, overridable by Java System property -Dlog4j.backupFiles=...
+log4j.backupFiles=10
+
+# log to file using rolling log strategy with one backup file
+# NOTE: Spark REPL overrides rootCategory, set log4j.logLevel above
+log4j.rootCategory=${log4j.logLevel}, logfile
+log4j.appender.logfile=org.apache.log4j.RollingFileAppender
+log4j.appender.logfile.File=${log4j.logFile}
+log4j.appender.logfile.MaxFileSize=${log4j.fileSize}
+log4j.appender.logfile.MaxBackupIndex=${log4j.backupFiles}
+log4j.appender.logfile.encoding=UTF-8
+log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
+log4j.appender.logfile.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Reduce Toree related "noise"
+log4j.logger.org.apache.toree.kernel.protocol.v5.stream.KernelOutputStream=ERROR
+
+# Modified Spark 2.1 default settings:
+
+# Spark overrides rootCategory level with the level set for the Scala & PySpark REPLs (default=WARN)
+# This is intended to reduce log verbosity while working with a Spark shell or PySpark shell.
+# However, notebook kernels internally use the spark-shell and pyspark shell implementation, but
+# since notebooks are logging to a log file, we want potentially more verbose logs.
+# We need to set the spark-shell and pyspark shell log level to the same level as the rootCategory.
+# See: org.apache.spark.internal.Logging#initializeLogging(isInterpreter=true)
+log4j.logger.org.apache.spark.repl.Main=${log4j.rootCategory}
+log4j.logger.org.apache.spark.api.python.PythonGatewayServer=${log4j.rootCategory}
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.spark_project.jetty=WARN
+log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+log4j.logger.org.apache.parquet=ERROR
+log4j.logger.parquet=ERROR
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
+log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
+
+EOF
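
The defaults written above (log4j.logLevel, log4j.fileSize, log4j.backupFiles) can be overridden per kernel through the kernel_spark_opts property in jnbg-env.xml, which passes Java system properties via --driver-java-options. A hypothetical value showing such overrides (the paths match the defaults earlier in this commit; the DEBUG/20MB/5 figures are illustrative only):

    --master=yarn --deploy-mode=client --driver-java-options="-Dlog4j.logLevel=DEBUG -Dlog4j.fileSize=20MB -Dlog4j.backupFiles=5 -Dlog4j.logFile=/var/log/jnbg/spark-driver-USER.log -Dlog4j.configuration=file:/var/lib/jnbg/conf/log4j.properties"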

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pyspark_configure.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pyspark_configure.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pyspark_configure.sh
new file mode 100755
index 0000000..59cd28d
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pyspark_configure.sh
@@ -0,0 +1,104 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+PY_EXEC=$1
+PY_VENV_PATH_PREFIX=$2
+PY_VENV_OWNER=$3
+KINIT_CMD=$4
+SPARK_HOME=$5
+PYTHONPATH=$6
+SPARK_OPTS=$7
+
+if [ ! -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  echo "Unexpected state of installation. No Python client installation detected while trying to install PySpark kernel."
+  exit 0
+fi
+
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+
+if [ -z "${VIRTUAL_ENV}" ]; then
+  echo "Unexpected condition detected; Unable to find virtualenv environment."
+  exit 1
+fi
+
+# assume --sys-prefix used for Toree kernel installs
+kernel_dir=${VIRTUAL_ENV}/share/jupyter/kernels/spark_2.1_python2
+kernel_run_file=$kernel_dir/bin/run.sh
+kernel_json_file=$kernel_dir/kernel.json
+
+mkdir -p $kernel_dir/bin
+rm -f $kernel_json_file $kernel_run_file
+
+cat <<'EOF' >> $kernel_run_file
+#!/usr/bin/env bash
+echo
+echo "Starting Python 2 kernel with Spark 2.1 for user ${KERNEL_USERNAME}"
+echo
+
+CONF_ARGS="--name '${KERNEL_USERNAME:-Notebook} Python' \
+           --conf spark.sql.catalogImplementation=in-memory"
+
+PYSPARK_SUBMIT_ARGS="${CONF_ARGS} ${PYSPARK_SUBMIT_ARGS}"
+
+# replace generic log file name with user-specific log file name based on authenticated end-user
+PYSPARK_SUBMIT_ARGS="${PYSPARK_SUBMIT_ARGS//spark-driver-USER.log/spark-driver-${KERNEL_USERNAME:-all}.log}"
+
+echo "PYSPARK_SUBMIT_ARGS=\"${PYSPARK_SUBMIT_ARGS}\""
+
+EOF
+
+# For kerberized clusters
+if [ -n "${KINIT_CMD}" ]; then
+  sed -i "$ a ${KINIT_CMD}\n" $kernel_run_file
+fi
+
+sed -i "$ a ${PY_VENV_PATH_PREFIX}/python2.7/bin/python2 -m ipykernel -f \${2}" $kernel_run_file
+
+chmod 755 $kernel_run_file
+
+# Escape double-quotes in the user specified SPARK_OPTS value
+SPARK_OPTS="${SPARK_OPTS//\"/\\\"}"
+
+cat <<EOF >> $kernel_json_file
+{
+  "language": "python",
+  "display_name": "Spark 2.1 - Python 2",
+  "env": {
+    "SPARK_HOME": "${SPARK_HOME}",
+    "PYTHONPATH": "${PYTHONPATH}",
+    "PYTHONSTARTUP": "${SPARK_HOME}/python/pyspark/shell.py",
+    "PYSPARK_SUBMIT_ARGS": "${SPARK_OPTS} pyspark-shell"
+  },
+  "argv": [
+    "$kernel_run_file",
+    "-f",
+    "{connection_file}"
+  ]
+}
+EOF
+
+# Set ownership of the created virtualenv if configured via python_virtualenv_restrictive
+if [ "${PY_VENV_OWNER}" != "root" ]; then
+  echo ====== Virtualenv owner = $PY_VENV_OWNER =========
+  chown -R ${PY_VENV_OWNER}: ${PY_VENV_PATH_PREFIX}/python2.7
+fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pythonenv_setup.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pythonenv_setup.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pythonenv_setup.sh
new file mode 100755
index 0000000..5b2b7d9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/pythonenv_setup.sh
@@ -0,0 +1,138 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+PY_EXEC=$1
+PY_VENV_PATH_PREFIX=$2
+PY_VENV_OWNER=$3
+
+checkPipInstall()
+{
+  pip show $1 2>&1 > /dev/null
+}
+
+checkSuccess()
+{
+  if [ $? != 0 ]
+  then
+    set +x
+    echo "Error encountered at line $1 while attempting to: "
+    if [ -n "$2" ]
+    then
+      echo $2
+    fi
+    echo Exiting.
+    exit 1
+  fi
+  set -x
+}
+
+if [ -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  echo "Python client installation detected. Nothing to do."
+  exit 0
+fi
+
+# /etc/pip.conf overrides all
+if [ -f /etc/pip.conf ]; then
+  PYPI_URL=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}')
+  PYPI_HOST=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}' | sed -e 's/^.*\/\///' | sed -e 's/:.*$//')
+  PYPI_PORT=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}'  | sed -e 's/^.*:*://' | sed -e 's/\/.*$//')
+else
+  # If no pip.conf then try to determine based on repo URLs in use
+  if [ -f /etc/yum.repos.d/IOP.repo ]; then
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w http
+    httpurl=$?
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w https
+    httpsurl=$?
+    if [ "$httpurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=http:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    elif [ "$httpsurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=https:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    fi
+  else
+    # fallback default
+    PYPI_HOST=ibm-open-platform.ibm.com
+    PYPI_PORT=8080
+    PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+  fi
+fi
+
+if [[ -z "${PYPI_URL}" || -z "${PYPI_HOST}" || -z "${PYPI_PORT}" ]];then
+  PYPI_HOST=ibm-open-platform.ibm.com
+  PYPI_PORT=8080
+  PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+fi
+
+PLATFORM=`uname -p`
+rhver=7
+
+if [ "$PY_EXEC" = "/opt/rh/python27/root/usr/bin/python" ]; then
+  rhscl=1
+else
+  rhscl=0
+fi
+
+if [ "$PLATFORM" == "x86_64" ]
+then
+  if [ -x /usr/bin/lsb_release ]; then
+    rhver=$(/usr/bin/lsb_release -rs | cut -f1 -d.)
+  fi
+
+  if [ "$rhver" -eq 6 ];then
+    if [ "$rhscl" -eq 1 ] && [ ! -f /opt/rh/python27/enable ]; then
+      echo "Installation failed; Install Python 2.7 using Red Hat Software Collections and retry."
+      exit 1
+    elif [ "$rhscl" -eq 1 ]; then
+      # Enable Python 2.7 from Red Hat Software Collections
+      source /opt/rh/python27/enable
+      # uninstall older pip version that accompanies SCL
+      pip uninstall -y pip
+    fi
+  fi
+fi
+
+pyver=`echo $(${PY_EXEC} -V 2>&1 | awk '{ print $2 }') | sed -e 's/\.//g'`
+if [ "$pyver" -lt 270 ]; then
+  echo "Installation failed; Ensure that the specified python_interpreter_path is Python version 2.7."
+  exit 1
+fi
+
+easy_install pip
+checkSuccess $LINENO "-  easy_install pip"
+pip -V
+
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple virtualenv --upgrade
+checkPipInstall virtualenv
+checkSuccess $LINENO "-  pip install virtualenv"
+
+virtualenv -p ${PY_EXEC} ${PY_VENV_PATH_PREFIX}/python2.7
+checkSuccess $LINENO "-  create virtualenv using ${PY_EXEC}"
+
+# Set ownership of the created virtualenv if configured via python_virtualenv_restrictive
+if [ "${PY_VENV_OWNER}" != "root" ]; then
+  echo ====== Virtualenv owner = $PY_VENV_OWNER =========
+  chown -R ${PY_VENV_OWNER}: ${PY_VENV_PATH_PREFIX}/python2.7
+fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_configure.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_configure.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_configure.sh
new file mode 100755
index 0000000..8f4cbb3
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_configure.sh
@@ -0,0 +1,151 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+NBX_USER=$1
+PY_EXEC=$2
+PY_VENV_PATH_PREFIX=$3
+PY_VENV_OWNER=$4
+KINIT_CMD=$5
+SPARK_HOME=$6
+TOREE_INTERPRETERS=$7
+TOREE_OPTS=${8:-""}
+SPARK_OPTS=$9
+
+checkSuccess()
+{
+  if [ $? != 0 ]
+  then
+    set +x
+    echo "Error encountered at line $1 while attempting to: "
+    if [ -n "$2" ]
+    then
+      echo $2
+    fi
+    echo Exiting.
+    exit 1
+  fi
+  set -x
+}
+
+# /etc/pip.conf overrides all
+if [ -f /etc/pip.conf ]; then
+  PYPI_URL=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}')
+  PYPI_HOST=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}' | sed -e 's/^.*\/\///' | sed -e 's/:.*$//')
+  PYPI_PORT=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}'  | sed -e 's/^.*:*://' | sed -e 's/\/.*$//')
+else
+  # If no pip.conf then try to determine based on repo URLs in use
+  if [ -f /etc/yum.repos.d/IOP.repo ]; then
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w http
+    httpurl=$?
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w https
+    httpsurl=$?
+    if [ "$httpurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=http:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    elif [ "$httpsurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=https:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    fi
+  else
+    # fallback default
+    PYPI_HOST=ibm-open-platform.ibm.com
+    PYPI_PORT=8080
+    PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+  fi
+fi
+
+if [[ -z "${PYPI_URL}" || -z "${PYPI_HOST}" || -z "${PYPI_PORT}" ]];then
+  PYPI_HOST=ibm-open-platform.ibm.com
+  PYPI_PORT=8080
+  PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+fi
+
+PLATFORM=`uname -p`
+rhver=7
+
+if [ "$PY_EXEC" = "/opt/rh/python27/root/usr/bin/python" ]; then
+  rhscl=1
+else
+  rhscl=0
+fi
+
+if [ "$PLATFORM" == "x86_64" ]
+then
+  if [ -x /usr/bin/lsb_release ]; then
+    rhver=$(/usr/bin/lsb_release -rs | cut -f1 -d.)
+  fi
+
+  if [ "$rhver" -eq 6 ];then
+    if [ "$rhscl" -eq 1 ] && [ ! -f /opt/rh/python27/enable ]; then
+      echo "Configuration failed; Expected Python 2.7 from Red Hat Software Collections was not found."
+      exit 1
+    elif [ "$rhscl" -eq 1 ]; then
+      source /opt/rh/python27/enable
+    fi
+  fi
+fi
+
+pyver=`echo $(${PY_EXEC} -V 2>&1 | awk '{ print $2 }') | sed -e 's/\.//g'`
+if [ "$pyver" -lt 270 ]; then
+  echo "Configuration failed; Ensure that the specified python_interpreter_path is Python version 2.7."
+  exit 1
+fi
+
+if [ ! -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  echo "Configuration failed as the virtualenv ${PY_VENV_PATH_PREFIX}/python2.7 was not found; Ensure that the installation was usccessful."
+  exit 1
+fi
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+pip -V
+
+if [ -z "${TOREE_OPTS}" ]; then
+  jupyter toree install --sys-prefix --spark_home=${SPARK_HOME} --kernel_name='Spark 2.1' --interpreters=${TOREE_INTERPRETERS} "--spark_opts=${SPARK_OPTS}"
+  checkSuccess $LINENO "-  jupyter toree install"
+else
+  jupyter toree install --sys-prefix --spark_home=${SPARK_HOME} --kernel_name='Spark 2.1' --interpreters=${TOREE_INTERPRETERS} "--toree_opts=${TOREE_OPTS}" "--spark_opts=${SPARK_OPTS}"
+  checkSuccess $LINENO "-  jupyter toree install"
+fi
+
+# Note the value of --kernel_name and --interpreters from the toree install command determines the kernel directory
+# i.e. --kernel_name='Spark 2.1' --interpreters='Scala' --> .../jupyter/kernels/spark_2.1_scala/
+kernel_dir=${PY_VENV_PATH_PREFIX}/python2.7/share/jupyter/kernels/spark_2.1_scala
+kernel_run_file=$kernel_dir/bin/run.sh
+
+# Include the end-user name for spark-submit application name (KERNEL_USERNAME env var set by nb2kg)
+sed -i "s/--name \"'Apache Toree'\"/--name \"'\${KERNEL_USERNAME:-Notebook} Scala'\"/" $kernel_run_file
+
+# Replace log file path in SPARK_OPTS
+sed -i "/eval exec/i SPARK_OPTS=\"\${SPARK_OPTS//spark-driver-USER.log/spark-driver-\${KERNEL_USERNAME:-all}.log}\"\n" $kernel_run_file
+
+# For kerberized clusters
+if [ -n "${KINIT_CMD}" ]; then
+  sed -i "/eval exec/i ${KINIT_CMD}\n" $kernel_run_file
+fi
+
+# Set ownership of the created virtualenv if configured via python_virtualenv_restrictive
+if [ "${PY_VENV_OWNER}" != "root" ]; then
+  echo ====== Virtualenv owner = $PY_VENV_OWNER =========
+  chown -R ${PY_VENV_OWNER}: ${PY_VENV_PATH_PREFIX}/python2.7
+fi
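
The two sed edits above patch the Toree-generated kernel launcher in place. As a rough sketch (assuming a typical Toree run.sh layout; the exact launcher contents and the spark-submit path vary by Toree release), the relevant lines of ${PY_VENV_PATH_PREFIX}/python2.7/share/jupyter/kernels/spark_2.1_scala/bin/run.sh end up as:

    # Sketch of the patched launcher; only the --name rewrite and the two inserted
    # lines are taken from this script, everything else is illustrative.
    SPARK_OPTS="${SPARK_OPTS//spark-driver-USER.log/spark-driver-${KERNEL_USERNAME:-all}.log}"
    # (the configured kinit command is inserted here on kerberized clusters)
    eval exec "${SPARK_HOME}/bin/spark-submit" --name "'${KERNEL_USERNAME:-Notebook} Scala'" "${SPARK_OPTS}" "$@"

The effect is that each notebook user's kernel is submitted under its own application name and writes to a per-user spark-driver log.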

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_install.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_install.sh b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_install.sh
new file mode 100755
index 0000000..7967105
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/files/toree_install.sh
@@ -0,0 +1,176 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -x
+
+PY_EXEC=$1
+PY_VENV_PATH_PREFIX=$2
+PY_VENV_OWNER=$3
+KINIT_CMD=$4
+SPARK_HOME=$5
+SPARK_OPTS=$6
+
+checkPipInstall()
+{
+  pip show $1 > /dev/null 2>&1
+}
+
+checkSuccess()
+{
+  if [ $? != 0 ]
+  then
+    set +x
+    echo "Error encountered at line $1 while attempting to: "
+    if [ -n "$2" ]
+    then
+      echo $2
+    fi
+    echo Exiting.
+    exit 1
+  fi
+  set -x
+}
+
+# /etc/pip.conf overrides all
+if [ -f /etc/pip.conf ]; then
+  PYPI_URL=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}')
+  PYPI_HOST=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}' | sed -e 's/^.*\/\///' | sed -e 's/:.*$//')
+  PYPI_PORT=$(cat  /etc/pip.conf | grep -i extra-index-url | awk '{print $3}'  | sed -e 's/^.*:*://' | sed -e 's/\/.*$//')
+else
+  # If no pip.conf then try to determine based on repo URLs in use
+  if [ -f /etc/yum.repos.d/IOP.repo ]; then
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w http
+    httpurl=$?
+    cat /etc/yum.repos.d/IOP.repo | grep baseurl |  grep -w https
+    httpsurl=$?
+    if [ "$httpurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=http:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    elif [ "$httpsurl" -eq 0 ]; then
+      PYPI_HOST=$(cat /etc/yum.repos.d/IOP.repo | grep baseurl | sed -e 's/baseurl=https:\/\///' | cut -f1 -d"/")
+      PYPI_PORT=8080
+      PYPI_URL=http://${PYPI_HOST}:${PYPI_PORT}/simple/
+    fi
+  else
+    # fallback default
+    PYPI_HOST=ibm-open-platform.ibm.com
+    PYPI_PORT=8080
+    PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+  fi
+fi
+
+if [[ -z "${PYPI_URL}" || -z "${PYPI_HOST}" || -z "${PYPI_PORT}" ]];then
+  PYPI_HOST=ibm-open-platform.ibm.com
+  PYPI_PORT=8080
+  PYPI_URL=http://ibm-open-platform.ibm.com:8080/simple/
+fi
+
+PLATFORM=`uname -p`
+rhver=7
+
+if [ "$PY_EXEC" = "/opt/rh/python27/root/usr/bin/python" ]; then
+  rhscl=1
+else
+  rhscl=0
+fi
+
+if [ "$PLATFORM" == "x86_64" ]
+then
+  if [ -x /usr/bin/lsb_release ]; then
+    rhver=$(/usr/bin/lsb_release -rs | cut -f1 -d.)
+  fi
+
+  if [ "$rhver" -eq 6 ];then
+    if [ "$rhscl" -eq 1 ] && [ ! -f /opt/rh/python27/enable ]; then
+      echo "Installation failed; Install Python 2.7 using Red Hat Software Collections and retry."
+      exit 1
+    elif [ "$rhscl" -eq 1 ]; then
+      source /opt/rh/python27/enable
+      # uninstall older pip version that accompanies SCL
+      pip uninstall -y pip
+    fi
+  fi
+fi
+
+pyver=`echo $(${PY_EXEC} -V 2>&1 | awk '{ print $2 }') | sed -e 's/\.//g'`
+if [ "$pyver" -lt 270 ]; then
+  echo "Installation failed; Ensure that the specified python_interpreter_path is Python version 2.7."
+  exit 1
+fi
+
+if [ ! -d "${PY_VENV_PATH_PREFIX}/python2.7" ]; then
+  easy_install pip
+  checkSuccess $LINENO "-  easy_install pip"
+
+  pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple virtualenv --upgrade
+  checkPipInstall virtualenv
+  checkSuccess $LINENO "-  pip install virtualenv"
+
+  virtualenv -p ${PY_EXEC} ${PY_VENV_PATH_PREFIX}/python2.7
+  checkSuccess $LINENO "-  create virtualenv using ${PY_EXEC}"
+fi
+source ${PY_VENV_PATH_PREFIX}/python2.7/bin/activate
+pip -V
+
+if [ "$rhver" -eq 6 ]; then
+  if [ "$rhscl" -eq 1 ]; then
+    pip -V
+    # uninstall older pip version that accompanies virtualenv with SCL
+    pip uninstall -y pip
+    easy_install pip
+    checkPipInstall pip
+    checkSuccess $LINENO "- easy_install pip"
+  fi
+fi
+
+# Use --index-url and not --extra-index-url as we are trying to install
+# specific package versions
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple/ setuptools --upgrade
+checkPipInstall setuptools
+checkSuccess $LINENO "- pip install setuptools"
+
+# Using --upgrade enables updating missing dependencies after failed installs
+pip install --trusted-host ${PYPI_HOST} --no-cache-dir --index-url http://${PYPI_HOST}:${PYPI_PORT}/simple/ toree --upgrade
+checkPipInstall toree
+checkSuccess $LINENO "- pip install toree"
+
+# Note the value of --kernel_name and --interpreters from the toree install command determines the kernel directory
+# i.e. --kernel_name='Spark 2.1' --interpreters='Scala' --> .../jupyter/kernels/spark_2.1_scala/
+kernel_dir=${PY_VENV_PATH_PREFIX}/python2.7/share/jupyter/kernels/spark_2.1_scala
+kernel_run_file=$kernel_dir/bin/run.sh
+
+# Include the end-user name for spark-submit application name (KERNEL_USERNAME env var set by nb2kg)
+sed -i "s/--name \"'Apache Toree'\"/--name \"'\${KERNEL_USERNAME:-Notebook} Scala'\"/" $kernel_run_file
+
+# Replace log file path in SPARK_OPTS
+sed -i "/eval exec/i SPARK_OPTS=\"\${SPARK_OPTS//spark-driver-USER.log/spark-driver-\${KERNEL_USERNAME:-all}.log}\"\n" $kernel_run_file
+
+# For kerberized clusters
+if [ -n "${KINIT_CMD}" ]; then
+  sed -i "/eval exec/i ${KINIT_CMD}\n" $kernel_run_file
+fi
+
+# Set ownership of the created virtualenv if configured via python_virtualenv_restrictive
+if [ "${PY_VENV_OWNER}" != "root" ]; then
+  echo ====== Virtualenv owner = $PY_VENV_OWNER =========
+  chown -R ${PY_VENV_OWNER}: ${PY_VENV_PATH_PREFIX}/python2.7
+fi
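
The positional interface of this installer (PY_EXEC through SPARK_OPTS above) is driven by the toree_commands list assembled in jkg_toree_params.py further below. A minimal sketch of a standalone invocation; every value here is an illustrative placeholder, not a default shipped with the service:

    # Args: python interpreter, virtualenv prefix, virtualenv owner,
    #       kinit command (empty on non-kerberized clusters), SPARK_HOME, Spark options
    bash toree_install.sh \
      /usr/bin/python \
      /opt/jnbg \
      root \
      '' \
      /usr/iop/current/spark2-client \
      '--master yarn --conf spark.sql.warehouse.dir=/apps/jnbg/warehouse'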

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree.py b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree.py
new file mode 100755
index 0000000..34bcfe1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os, errno
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Execute, File, Directory
+from resource_management.core.source import StaticFile, InlineTemplate, Template
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.get_user_call_output import get_user_call_output
+from resource_management.libraries.functions.check_process_status import check_process_status
+import jnbg_helpers as helpers
+
+class GatewayKernels(Script):
+  def install(self, env):
+    import jkg_toree_params as params
+    self.install_packages(env)
+
+    # Create user and group if they don't exist
+    helpers.create_linux_user(params.user, params.group)
+
+    # Create directories used by the service and service user
+    Directory([params.home_dir, params.jkg_pid_dir, params.log_dir, params.spark_config_dir],
+              mode=0755,
+              create_parents=True,
+              owner=params.user,
+              group=params.group,
+              recursive_ownership=True
+             )
+
+    if os.path.exists(params.py_venv_pathprefix):
+      Logger.warning("Virtualenv path prefix {0} to be used for JNBG service might already exist."
+                     "This is unexpected if the service or service component is being installed on the node for the first time."
+                     "It could indicate remnants from a prior installation.".format(params.py_venv_pathprefix))
+
+    # Setup bash scripts for execution
+    for sh_script in params.sh_scripts:
+      File(params.sh_scripts_dir + os.sep + sh_script,
+           content=StaticFile(sh_script),
+           mode=0750
+          )
+    for sh_script in params.sh_scripts_user:
+      File(params.sh_scripts_dir + os.sep + sh_script,
+           content=StaticFile(sh_script),
+           mode=0755
+          )
+
+    # Run install commands for JKG defined in params
+    for command in params.jkg_commands: Execute(command, logoutput=True)
+
+    # Run install commands for Toree defined in params
+    for command in params.toree_commands: Execute(command, logoutput=True)
+
+    # Run setup commands for log4j
+    for command in params.log4j_setup_commands: Execute(command, logoutput=True)
+
+    # Note that configure is done during startup
+
+  def stop(self, env):
+    import status_params as params
+    import jkg_toree_params as jkgparams
+    env.set_params(params)
+
+    helpers.stop_process(params.jkg_pid_file, jkgparams.user, jkgparams.log_dir)
+
+  def start(self, env):
+    import os, sys, time
+    import jkg_toree_params as params
+    env.set_params(params)
+    self.configure(env)
+    delay_checks = 8
+
+    # Need HDFS started for the next step
+    helpers.create_hdfs_dirs(params.user, params.group, params.dirs)
+
+    Execute(params.start_command, user=params.user, logoutput=True)
+    check_process_status(params.jkg_pid_file)
+
+    time.sleep(delay_checks)
+
+    with open(params.jkg_pid_file, 'r') as fp:
+      try:
+        os.kill(int(fp.read().strip()), 0)
+      except OSError as ose:
+        if ose.errno != errno.EPERM:
+          raise Fail("Error starting Jupyter Kernel Gateway. Check {0} for the possible cause.".format(params.log_dir + "/jupyter_kernel_gateway.log"))
+        else:
+          # A non-root install might have to fall back to the status check,
+          # with the side effect that any error might only be reflected during
+          # the status check after a minute rather than immediately.
+          check_process_status(params.jkg_pid_file)
+
+  def status(self, env):
+    import status_params as params
+    env.set_params(params)
+    check_process_status(params.jkg_pid_file)
+
+  def configure(self, env):
+    import jkg_toree_params as params
+    env.set_params(params)
+
+    # Create directories used by the service and service user
+    # if they were updated
+    Directory([params.home_dir, params.jkg_pid_dir, params.log_dir],
+              mode=0755,
+              create_parents=True,
+              owner=params.user,
+              group=params.group,
+              recursive_ownership=True)
+
+    # Run commands to configure Toree and PySpark
+    for command in params.toree_configure_commands: Execute(command, logoutput=True)
+    for command in params.pyspark_configure_commands: Execute(command, logoutput=True)
+
+if __name__ == "__main__":
+  GatewayKernels().execute()
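
After launching jkg_start.sh, start() above re-checks liveness with a signal-0 probe against the recorded pid, treating EPERM as "running but owned by another user" for non-root installs. Ignoring that special case, the probe is roughly equivalent to the following shell sketch (the pid-file path is a placeholder; the real one is derived from jkg_pid_dir_prefix):

    # Liveness-probe sketch; placeholder path.
    PIDFILE=/var/run/jnbg/jupyter_kernel_gateway.pid
    if kill -0 "$(cat "$PIDFILE")" 2>/dev/null; then
      echo "Jupyter Kernel Gateway appears to be running"
    else
      echo "Startup may have failed; check jupyter_kernel_gateway.log under the configured jkg_log_dir"
    fi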

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree_params.py b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree_params.py
new file mode 100755
index 0000000..13a8aba
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jkg_toree_params.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import get_kinit_path
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+import jnbg_helpers as helpers
+
+# Server configurations
+config = Script.get_config()
+stack_root = Script.get_stack_root()
+cluster_configs = config['clusterHostInfo']
+
+# Notebook service configs
+user = config['configurations']['jnbg-env']['notebook_user']
+group = config['configurations']['jnbg-env']['notebook_group']
+log_dir = config['configurations']['jnbg-env']['jkg_log_dir']
+jkg_pid_dir = config['configurations']['jnbg-env']['jkg_pid_dir_prefix']
+jkg_host = str(cluster_configs['kernel_gateway_hosts'][0])
+jkg_port = str(config['configurations']['jnbg-env']['jkg_port'])
+jkg_loglevel = str(config['configurations']['jnbg-env']['jkg_loglevel'])
+jkg_max_kernels = config['configurations']['jnbg-env']['max_kernels']
+jkg_cull_period = config['configurations']['jnbg-env']['cull_idle_kernel_period']
+jkg_cull_interval = config['configurations']['jnbg-env']['cull_idle_kernel_interval']
+py_executable = config['configurations']['jnbg-env']['python_interpreter_path']
+py_venv_pathprefix = config['configurations']['jnbg-env']['python_virtualenv_path_prefix']
+py_venv_restrictive = config['configurations']['jnbg-env']['python_virtualenv_restrictive']
+spark_sql_warehouse_dir = config['configurations']['jnbg-env']['spark_sql_warehouse_dir']
+pythonpath = config['configurations']['jnbg-env']['pythonpath']
+spark_home = format("{stack_root}/current/spark2-client")
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#ui_ssl_enabled = config['configurations']['jnbg-env']['jnbg.ssl']
+ui_ssl_enabled = False
+spark_opts = str(config['configurations']['jnbg-env']['kernel_spark_opts'])
+modified_spark_opts = format("{spark_opts} --conf spark.sql.warehouse.dir={spark_sql_warehouse_dir}")
+modified_spark_opts = "'{0}'".format(modified_spark_opts)
+toree_opts = str(config['configurations']['jnbg-env']['toree_opts'])
+toree_opts = "'{0}'".format(toree_opts)
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+jkg_log_formatter_cmd = format("%(asctime)s,%(msecs)03d %(levelname)s %(name)s: %(message)s")
+jkg_log_formatter_cmd = "'{0}'".format(jkg_log_formatter_cmd)
+venv_owner="root" if py_venv_restrictive else user
+spark_config_dir = config['configurations']['jnbg-env']['spark_conf_dir']
+interpreters = "Scala"
+
+jnbg_kinit_cmd = ""
+if security_enabled:
+  _hostname_lowercase = config['hostname'].lower()
+  jnbg_kerberos_keytab = config['configurations']['jnbg-env']['jnbg.service.kerberos.keytab']
+  jnbg_kerberos_principal = config['configurations']['jnbg-env']['jnbg.service.kerberos.principal']
+  jnbg_kerberos_principal = jnbg_kerberos_principal.replace('_HOST',_hostname_lowercase)
+  jnbg_kinit_cmd = format("{kinit_path_local} -kt {jnbg_kerberos_keytab} {jnbg_kerberos_principal}; ")
+
+jnbg_kinit_arg = "'{0}'".format(jnbg_kinit_cmd)
+
+ambarisudo = AMBARI_SUDO_BINARY
+home_dir = format("/home/{user}")
+hdfs_home_dir = format("/user/{user}")
+jkg_pid_file = format("{jkg_pid_dir}/jupyter_kernel_gateway.pid")
+dirs = [(hdfs_home_dir, "0775"), (spark_sql_warehouse_dir, "01770")]
+package_dir = helpers.package_dir()
+sh_scripts_dir = format("{package_dir}files/")
+sh_scripts = ['jkg_install.sh',
+              'toree_install.sh',
+              'log4j_setup.sh',
+              'toree_configure.sh',
+              'pyspark_configure.sh',
+              'pythonenv_setup.sh']
+sh_scripts_user = ['jkg_start.sh']
+
+# Sequence of commands to be executed for JKG installation
+jkg_commands = []
+cmd_file_name = "jkg_install.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+
+jkg_commands.append(ambarisudo + ' ' +
+                    cmd_file_path + ' ' +
+                    py_executable + ' ' +
+                    py_venv_pathprefix + ' ' +
+                    venv_owner + ' ' +
+                    jnbg_kinit_arg)
+
+# Sequence of commands executed for Toree installation
+toree_commands = []
+cmd_file_name = "toree_install.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+
+toree_commands.append(ambarisudo + ' ' +
+                      cmd_file_path + ' ' +
+                      py_executable + ' ' +
+                      py_venv_pathprefix + ' ' +
+                      venv_owner + ' ' +
+                      jnbg_kinit_arg + ' ' +
+                      spark_home + ' ' +
+                      modified_spark_opts)
+
+# Sequence of commands executed for Toree configuration
+toree_configure_commands = []
+cmd_file_name = "toree_configure.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+
+toree_configure_commands.append(ambarisudo + ' ' +
+                                cmd_file_path + ' ' +
+                                user + ' ' +
+                                py_executable + ' ' +
+                                py_venv_pathprefix + ' ' +
+                                venv_owner + ' ' +
+                                jnbg_kinit_arg + ' ' +
+                                spark_home + ' ' +
+                                interpreters + ' ' +
+                                toree_opts + ' ' +
+                                modified_spark_opts)
+
+# Sequence of commands executed for PySpark kernel configuration
+pyspark_configure_commands = []
+cmd_file_name = "pyspark_configure.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+
+pyspark_configure_commands.append(ambarisudo + ' ' +
+                                  cmd_file_path + ' ' +
+                                  py_executable + ' ' +
+                                  py_venv_pathprefix + ' ' +
+                                  venv_owner + ' ' +
+                                  jnbg_kinit_arg + ' ' +
+                                  spark_home + ' ' +
+                                  pythonpath + ' ' +
+                                  modified_spark_opts)
+
+log4j_setup_commands = []
+cmd_file_name = "log4j_setup.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+
+log4j_setup_commands.append(ambarisudo + ' ' +
+                            cmd_file_path + ' ' +
+                            spark_config_dir)
+
+# JKG startup command
+start_args = ['"jupyter kernelgateway' +
+              ' --ip=' + '0.0.0.0' +
+              ' --port=' + jkg_port +
+              ' --port_retries=' + '0' +
+              ' --log-level=' + jkg_loglevel +
+              ' --KernelGatewayApp.max_kernels=' + jkg_max_kernels,
+              ' --KernelGatewayApp.cull_idle_kernel_period=' + jkg_cull_period,
+              ' --KernelGatewayApp.cull_idle_kernel_interval=' + jkg_cull_interval,
+              ' --KernelSpecManager.ensure_native_kernel=' + 'False',
+              ' --KernelGatewayApp.log_format=' + jkg_log_formatter_cmd,
+              ' --JupyterWebsocketPersonality.list_kernels=' + 'True "',
+              spark_home,
+              py_executable,
+              py_venv_pathprefix,
+              jnbg_kinit_arg,
+              log_dir + "/jupyter_kernel_gateway.log",
+              jkg_pid_file]
+
+cmd_file_name = "jkg_start.sh"
+cmd_file_path = format("{package_dir}files/{cmd_file_name}")
+start_command = cmd_file_path + ' ' + ' '.join(start_args)
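
Joined with spaces, start_args yields a single jkg_start.sh command line: the quoted first argument is the full "jupyter kernelgateway ..." invocation, followed by SPARK_HOME, the python interpreter, the virtualenv prefix, the kinit argument, the log file and the pid file. A condensed sketch with placeholder values (port, limits and paths come from jnbg-env; the log_format argument is omitted here for brevity):

    # Sketch of the assembled start_command; all concrete values are placeholders.
    jkg_start.sh \
      "jupyter kernelgateway --ip=0.0.0.0 --port=8888 --port_retries=0 --log-level=INFO \
       --KernelGatewayApp.max_kernels=15 --KernelGatewayApp.cull_idle_kernel_period=43200 \
       --KernelGatewayApp.cull_idle_kernel_interval=3600 \
       --KernelSpecManager.ensure_native_kernel=False \
       --JupyterWebsocketPersonality.list_kernels=True " \
      /usr/iop/current/spark2-client /usr/bin/python /opt/jnbg '' \
      /var/log/jnbg/jupyter_kernel_gateway.log /var/run/jnbg/jupyter_kernel_gateway.pid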

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_helpers.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_helpers.py b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_helpers.py
new file mode 100755
index 0000000..4d126e3
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_helpers.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os, pwd, grp
+from resource_management.core.resources.system import Execute, File
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.get_user_call_output import get_user_call_output
+from resource_management.libraries.functions.show_logs import show_logs
+#from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+
+def package_dir():
+  return os.path.realpath(__file__).split('/package')[0] + '/package/'
+
+def create_linux_user(user, group):
+  sudo = AMBARI_SUDO_BINARY
+
+  try: pwd.getpwnam(user)
+  except KeyError: Execute(format("{sudo} useradd ") + user, logoutput=True)
+  try: grp.getgrnam(group)
+  except KeyError: Execute(format("{sudo} groupadd ") + group, logoutput=True)
+
+def create_hdfs_dirs(user, group, dirs):
+  import jnbg_params as params
+  for dir, perms in dirs:
+    params.HdfsResource(dir,
+                        type = "directory",
+                        action = "create_on_execute",
+                        owner = user,
+                        group = group,
+                        mode = int(perms, 8)
+                       )
+  params.HdfsResource(None, action="execute")
+ 
+def stop_process(pid_file, user, log_dir):
+  """
+  Kill the process by pid file, then check the process is running or not.
+  If the process is still running after the kill command, try to kill
+  with -9 option (hard kill)
+  """
+
+  sudo = AMBARI_SUDO_BINARY
+  pid = get_user_call_output(format("cat {pid_file}"), user=user, is_checked_call=False)[1]
+  process_id_exists_command = format("ls {pid_file} >/dev/null 2>&1 && ps -p {pid} >/dev/null 2>&1")
+
+  kill_cmd = format("{sudo} kill {pid}")
+  Execute(kill_cmd, not_if=format("! ({process_id_exists_command})"))
+
+  wait_time = 5
+  hard_kill_cmd = format("{sudo} kill -9 {pid}")
+  Execute(hard_kill_cmd,
+          not_if=format("! ({process_id_exists_command}) || ( sleep {wait_time} && ! ({process_id_exists_command}) )"),
+          ignore_failures = True)
+
+  try:
+    Execute(format("! ({process_id_exists_command})"),
+            tries=20,
+            try_sleep=3)
+  except:
+    show_logs(log_dir, user)
+    raise
+
+  File(pid_file, action="delete")

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_params.py b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_params.py
new file mode 100755
index 0000000..82660ab
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/jnbg_params.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+import functools
+
+#for create_hdfs_directory
+
+# server configurations
+config = Script.get_config()
+
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+hostname = config["hostname"]
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_site = config['configurations']['hdfs-site']
+default_fs = config['configurations']['core-site']['fs.defaultFS']
+dfs_type = default("/commandParams/dfs_type", "")
+
+# create partial functions with common arguments for every HdfsResource call
+# to create hdfs directory we need to import this and call HdfsResource in code
+
+HdfsResource = functools.partial(
+ HdfsResource,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+  security_enabled = security_enabled,
+  user = hdfs_user,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir,
+  principal_name = hdfs_principal_name,
+  hdfs_site = hdfs_site,
+  default_fs = default_fs,
+  immutable_paths = get_not_managed_resources(),
+  dfs_type = dfs_type
+ )
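
This HdfsResource partial is what jnbg_helpers.create_hdfs_dirs drives at service start; with the dirs list defined in jkg_toree_params.py it amounts to creating the notebook user's HDFS home directory and the Spark SQL warehouse directory. A rough hdfs-cli equivalent, where the paths and the notebook user/group are placeholders for the configured notebook_user and spark_sql_warehouse_dir values:

    # Sketch of the equivalent HDFS operations, run with HDFS superuser credentials.
    hdfs dfs -mkdir -p /user/notebook /apps/jnbg/warehouse
    hdfs dfs -chown notebook:notebook /user/notebook /apps/jnbg/warehouse
    hdfs dfs -chmod 0775 /user/notebook
    hdfs dfs -chmod 1770 /apps/jnbg/warehouse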

http://git-wip-us.apache.org/repos/asf/ambari/blob/69e492f2/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/py_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/py_client.py b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/py_client.py
new file mode 100755
index 0000000..094edde
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/JNBG/0.2.0/package/scripts/py_client.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+from resource_management.core.resources.system import Execute, File, Directory
+from resource_management.core.source import StaticFile, InlineTemplate, Template
+from resource_management.core.exceptions import ClientComponentHasNoStatus
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.format import format
+import jnbg_helpers as helpers
+
+class PyClient(Script):
+  def install(self, env):
+    import py_client_params as params
+    from jkg_toree_params import user, group, sh_scripts_dir, sh_scripts, sh_scripts_user
+
+    # Setup bash scripts for execution
+    for sh_script in sh_scripts:
+      File(sh_scripts_dir + os.sep + sh_script,
+           content=StaticFile(sh_script),
+           mode=0750
+          )
+    for sh_script in sh_scripts_user:
+      File(sh_scripts_dir + os.sep + sh_script,
+           content=StaticFile(sh_script),
+           mode=0755
+          )
+
+    self.install_packages(env)
+    self.configure(env)
+
+    # Create user and group if they don't exist
+    helpers.create_linux_user(user, group)
+
+    # Run install commands for Python client defined in params
+    for command in params.commands: Execute(command, logoutput=True)
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+  def configure(self, env):
+    import py_client_params as params
+    env.set_params(params)
+
+if __name__ == "__main__":
+  PyClient().execute()