Posted to commits@ambari.apache.org by al...@apache.org on 2017/06/28 00:24:16 UTC

[18/51] [partial] ambari git commit: AMBARI-21349. Create BigInsights Stack Skeleton in Ambari 2.5 (alejandro)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark.py
new file mode 100755
index 0000000..86c738f
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark.py
@@ -0,0 +1,351 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+import fileinput
+import shutil
+import os
+import re
+from resource_management import *
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+
+
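+# Lays down the Spark configuration: conf, pid and log directories,
+# spark-defaults.conf, spark-env.sh, log4j properties, java-opts and, when Hive
+# is installed, a Spark-specific copy of hive-site.xml. The optional type/action
+# arguments gate creation of the Spark HDFS user directory.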
+def spark(env, type=None, action=None):
+  import params
+
+  env.set_params(params)
+
+  Directory(params.spark_conf,
+            owner=params.spark_user,
+            create_parents=True,
+            group=params.user_group
+  )
+
+  Directory([params.spark_pid_dir, params.spark_log_dir],
+            owner=params.spark_user,
+            group=params.user_group,
+            mode=0775,
+            create_parents=True
+  )
+  # Only the Thrift Server ('server' type) needs its HDFS user directory, and
+  # only when it is being started or configured.
+  if type == 'server':
+    if action == 'start' or action == 'config':
+      params.HdfsResource(params.spark_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.spark_user,
+                         mode=0775
+      )
+      params.HdfsResource(None, action="execute")
+
+  #file_path = params.spark_conf + '/spark-defaults.conf'
+  #create_file(file_path)
+
+  #write_properties_to_file(file_path, params.config['configurations']['spark-defaults'])
+  configFile("spark-defaults.conf", template_name="spark-defaults.conf.j2")
+
+  # create spark-env.sh in conf dir
+  File(os.path.join(params.spark_conf, 'spark-env.sh'),
+       owner=params.spark_user,
+       group=params.user_group,
+       content=InlineTemplate(params.spark_env_sh)
+  )
+
+  # create log4j.properties in conf dir
+  File(os.path.join(params.spark_conf, 'spark-log4j.properties'),
+       owner=params.spark_user,
+       group=params.user_group,
+       content=InlineTemplate(params.spark_log4j)
+  )
+
+  #create metrics.properties in conf dir
+#  File(os.path.join(params.spark_conf, 'metrics.properties'),
+#       owner=params.spark_user,
+#       group=params.spark_group,
+#       content=InlineTemplate(params.spark_metrics_properties)
+#  )
+
+  # create java-opts in etc/spark/conf dir for iop.version
+  File(os.path.join(params.spark_conf, 'java-opts'),
+       owner=params.spark_user,
+       group=params.user_group,
+       content=params.spark_javaopts_properties
+  )
+
+  if params.is_hive_installed:
+    hive_config = get_hive_config()
+    XmlConfig("hive-site.xml",
+              conf_dir=params.spark_conf,
+              configurations=hive_config,
+              owner=params.spark_user,
+              group=params.user_group,
+              mode=0640)
+
+
+def get_hive_config():
+  import params
+  # Build the hive-site properties for Spark's copy of hive-site.xml.
+  # Boolean values must be converted to lowercase strings ('true'/'false')
+  # so they are written out as valid Hive configuration values.
+  hive_conf_dict = dict()
+  hive_conf_dict['hive.metastore.uris'] = params.config['configurations']['hive-site']['hive.metastore.uris']
+  hive_conf_dict['ambari.hive.db.schema.name'] = params.config['configurations']['hive-site']['ambari.hive.db.schema.name']
+  hive_conf_dict['datanucleus.cache.level2.type'] = params.config['configurations']['hive-site']['datanucleus.cache.level2.type']
+  hive_conf_dict['fs.file.impl.disable.cache'] = str(params.config['configurations']['hive-site']['fs.file.impl.disable.cache']).lower()
+  hive_conf_dict['fs.hdfs.impl.disable.cache'] = str(params.config['configurations']['hive-site']['fs.hdfs.impl.disable.cache']).lower()
+  hive_conf_dict['hive.auto.convert.join'] = str(params.config['configurations']['hive-site']['hive.auto.convert.join']).lower()
+  hive_conf_dict['hive.auto.convert.join.noconditionaltask'] = str(params.config['configurations']['hive-site']['hive.auto.convert.join.noconditionaltask']).lower()
+  hive_conf_dict['hive.auto.convert.join.noconditionaltask.size'] = params.config['configurations']['hive-site']['hive.auto.convert.join.noconditionaltask.size']
+  hive_conf_dict['hive.auto.convert.sortmerge.join'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join']).lower()
+  #hive_conf_dict['hive.auto.convert.sortmerge.join.noconditionaltask'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join.noconditionaltask']).lower()
+  hive_conf_dict['hive.auto.convert.sortmerge.join.to.mapjoin'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join.to.mapjoin']).lower()
+  hive_conf_dict['hive.cbo.enable'] = str(params.config['configurations']['hive-site']['hive.cbo.enable']).lower()
+  hive_conf_dict['hive.cli.print.header'] = str(params.config['configurations']['hive-site']['hive.cli.print.header']).lower()
+  hive_conf_dict['hive.cluster.delegation.token.store.class'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.class']
+  hive_conf_dict['hive.cluster.delegation.token.store.zookeeper.connectString'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.zookeeper.connectString']
+  hive_conf_dict['hive.cluster.delegation.token.store.zookeeper.znode'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.zookeeper.znode']
+  hive_conf_dict['hive.compactor.abortedtxn.threshold'] = params.config['configurations']['hive-site']['hive.compactor.abortedtxn.threshold']
+  hive_conf_dict['hive.compactor.check.interval'] = params.config['configurations']['hive-site']['hive.compactor.check.interval']
+  hive_conf_dict['hive.compactor.delta.num.threshold'] = params.config['configurations']['hive-site']['hive.compactor.delta.num.threshold']
+  hive_conf_dict['hive.compactor.delta.pct.threshold'] = params.config['configurations']['hive-site']['hive.compactor.delta.pct.threshold']
+  hive_conf_dict['hive.compactor.initiator.on'] = str(params.config['configurations']['hive-site']['hive.compactor.initiator.on']).lower()
+  hive_conf_dict['hive.compactor.worker.threads'] = params.config['configurations']['hive-site']['hive.compactor.worker.threads']
+  hive_conf_dict['hive.compactor.worker.timeout'] = params.config['configurations']['hive-site']['hive.compactor.worker.timeout']
+  hive_conf_dict['hive.compute.query.using.stats'] = str(params.config['configurations']['hive-site']['hive.compute.query.using.stats']).lower()
+  hive_conf_dict['hive.conf.restricted.list'] = params.config['configurations']['hive-site']['hive.conf.restricted.list']
+  hive_conf_dict['hive.enforce.bucketing'] = str(params.config['configurations']['hive-site']['hive.enforce.bucketing']).lower()
+  hive_conf_dict['hive.enforce.sorting'] = str(params.config['configurations']['hive-site']['hive.enforce.sorting']).lower()
+  hive_conf_dict['hive.enforce.sortmergebucketmapjoin'] = str(params.config['configurations']['hive-site']['hive.enforce.sortmergebucketmapjoin']).lower()
+  hive_conf_dict['hive.exec.compress.intermediate'] = str(params.config['configurations']['hive-site']['hive.exec.compress.intermediate']).lower()
+  hive_conf_dict['hive.exec.compress.output'] = str(params.config['configurations']['hive-site']['hive.exec.compress.output']).lower()
+  hive_conf_dict['hive.exec.dynamic.partition'] = str(params.config['configurations']['hive-site']['hive.exec.dynamic.partition']).lower()
+  hive_conf_dict['hive.exec.dynamic.partition.mode'] = params.config['configurations']['hive-site']['hive.exec.dynamic.partition.mode']
+  hive_conf_dict['hive.exec.max.created.files'] = params.config['configurations']['hive-site']['hive.exec.max.created.files']
+  hive_conf_dict['hive.exec.max.dynamic.partitions'] = params.config['configurations']['hive-site']['hive.exec.max.dynamic.partitions']
+  hive_conf_dict['hive.exec.max.dynamic.partitions.pernode'] = params.config['configurations']['hive-site']['hive.exec.max.dynamic.partitions.pernode']
+  hive_conf_dict['hive.exec.orc.compression.strategy'] = params.config['configurations']['hive-site']['hive.exec.orc.compression.strategy']
+  hive_conf_dict['hive.exec.orc.default.compress'] = params.config['configurations']['hive-site']['hive.exec.orc.default.compress']
+  hive_conf_dict['hive.exec.orc.default.stripe.size'] = params.config['configurations']['hive-site']['hive.exec.orc.default.stripe.size']
+  hive_conf_dict['hive.exec.parallel'] = str(params.config['configurations']['hive-site']['hive.exec.parallel']).lower()
+  hive_conf_dict['hive.exec.parallel.thread.number'] = params.config['configurations']['hive-site']['hive.exec.parallel.thread.number']
+  hive_conf_dict['hive.exec.reducers.bytes.per.reducer'] = params.config['configurations']['hive-site']['hive.exec.reducers.bytes.per.reducer']
+  hive_conf_dict['hive.exec.reducers.max'] = params.config['configurations']['hive-site']['hive.exec.reducers.max']
+  hive_conf_dict['hive.exec.scratchdir'] = params.config['configurations']['hive-site']['hive.exec.scratchdir']
+  hive_conf_dict['hive.exec.submit.local.task.via.child'] = str(params.config['configurations']['hive-site']['hive.exec.submit.local.task.via.child']).lower()
+  hive_conf_dict['hive.exec.submitviachild'] = str(params.config['configurations']['hive-site']['hive.exec.submitviachild']).lower()
+  hive_conf_dict['hive.execution.engine'] = params.config['configurations']['hive-site']['hive.execution.engine']
+  hive_conf_dict['hive.fetch.task.aggr'] = str(params.config['configurations']['hive-site']['hive.fetch.task.aggr']).lower()
+  hive_conf_dict['hive.fetch.task.conversion'] = params.config['configurations']['hive-site']['hive.fetch.task.conversion']
+  hive_conf_dict['hive.fetch.task.conversion.threshold'] = params.config['configurations']['hive-site']['hive.fetch.task.conversion.threshold']
+  hive_conf_dict['hive.heapsize'] = params.config['configurations']['hive-env']['hive.heapsize']
+  hive_conf_dict['hive.limit.optimize.enable'] = str(params.config['configurations']['hive-site']['hive.limit.optimize.enable']).lower()
+  hive_conf_dict['hive.limit.pushdown.memory.usage'] = params.config['configurations']['hive-site']['hive.limit.pushdown.memory.usage']
+  hive_conf_dict['hive.metastore.authorization.storage.checks'] = str(params.config['configurations']['hive-site']['hive.metastore.authorization.storage.checks']).lower()
+  hive_conf_dict['hive.metastore.cache.pinobjtypes'] = params.config['configurations']['hive-site']['hive.metastore.cache.pinobjtypes']
+  # The following two parameters are hardcoded to work around an issue
+  # interpreting the syntax of the property values (units such as 's' for
+  # seconds result in an error).
+  #hive_conf_dict['hive.metastore.client.connect.retry.delay'] = params.config['configurations']['hive-site']['hive.metastore.client.connect.retry.delay']
+  #hive_conf_dict['hive.metastore.client.socket.timeout'] = params.config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
+  hive_conf_dict['hive.metastore.client.connect.retry.delay'] = '5'
+  hive_conf_dict['hive.metastore.client.socket.timeout'] = '1800'
+  hive_conf_dict['hive.metastore.connect.retries'] = params.config['configurations']['hive-site']['hive.metastore.connect.retries']
+  hive_conf_dict['hive.metastore.execute.setugi'] = str(params.config['configurations']['hive-site']['hive.metastore.execute.setugi']).lower()
+  hive_conf_dict['hive.metastore.failure.retries'] = params.config['configurations']['hive-site']['hive.metastore.failure.retries']
+  hive_conf_dict['hive.metastore.kerberos.keytab.file'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+  hive_conf_dict['hive.metastore.kerberos.principal'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal']
+  hive_conf_dict['hive.metastore.pre.event.listeners'] = params.config['configurations']['hive-site']['hive.metastore.pre.event.listeners']
+  hive_conf_dict['hive.metastore.sasl.enabled'] = str(params.config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower()
+  hive_conf_dict['hive.metastore.server.max.threads'] = params.config['configurations']['hive-site']['hive.metastore.server.max.threads']
+  hive_conf_dict['hive.metastore.warehouse.dir'] = params.config['configurations']['hive-site']['hive.metastore.warehouse.dir']
+  hive_conf_dict['hive.orc.compute.splits.num.threads'] = params.config['configurations']['hive-site']['hive.orc.compute.splits.num.threads']
+  hive_conf_dict['hive.orc.splits.include.file.footer'] = str(params.config['configurations']['hive-site']['hive.orc.splits.include.file.footer']).lower()
+  hive_conf_dict['hive.security.authenticator.manager'] = params.config['configurations']['hive-site']['hive.security.authenticator.manager']
+  hive_conf_dict['hive.security.metastore.authenticator.manager'] = params.config['configurations']['hive-site']['hive.security.metastore.authenticator.manager']
+  hive_conf_dict['hive.security.metastore.authorization.auth.reads'] = str(params.config['configurations']['hive-site']['hive.security.metastore.authorization.auth.reads']).lower()
+  hive_conf_dict['hive.security.metastore.authorization.manager'] = params.config['configurations']['hive-site']['hive.security.metastore.authorization.manager']
+  hive_conf_dict['hive.server2.allow.user.substitution'] = str(params.config['configurations']['hive-site']['hive.server2.allow.user.substitution']).lower()
+  hive_conf_dict['hive.server2.logging.operation.enabled'] = str(params.config['configurations']['hive-site']['hive.server2.logging.operation.enabled']).lower()
+  hive_conf_dict['hive.server2.logging.operation.log.location'] = params.config['configurations']['hive-site']['hive.server2.logging.operation.log.location']
+  hive_conf_dict['hive.server2.support.dynamic.service.discovery'] = str(params.config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery']).lower()
+  hive_conf_dict['hive.server2.table.type.mapping'] = params.config['configurations']['hive-site']['hive.server2.table.type.mapping']
+  hive_conf_dict['hive.server2.thrift.http.path'] = params.config['configurations']['hive-site']['hive.server2.thrift.http.path']
+  hive_conf_dict['hive.server2.thrift.max.worker.threads'] = params.config['configurations']['hive-site']['hive.server2.thrift.max.worker.threads']
+  hive_conf_dict['hive.server2.thrift.port'] = params.spark_thriftserver_port
+  hive_conf_dict['hive.server2.thrift.sasl.qop'] = params.config['configurations']['hive-site']['hive.server2.thrift.sasl.qop']
+  hive_conf_dict['hive.server2.transport.mode'] = params.config['configurations']['hive-site']['hive.server2.transport.mode']
+  hive_conf_dict['hive.server2.use.SSL'] = str(params.config['configurations']['hive-site']['hive.server2.use.SSL']).lower()
+  hive_conf_dict['hive.server2.zookeeper.namespace'] = params.config['configurations']['hive-site']['hive.server2.zookeeper.namespace']
+  hive_conf_dict['hive.smbjoin.cache.rows'] = params.config['configurations']['hive-site']['hive.smbjoin.cache.rows']
+  hive_conf_dict['hive.stats.autogather'] = str(params.config['configurations']['hive-site']['hive.stats.autogather']).lower()
+  hive_conf_dict['hive.stats.dbclass'] = params.config['configurations']['hive-site']['hive.stats.dbclass']
+  hive_conf_dict['hive.stats.fetch.column.stats'] = params.config['configurations']['hive-site']['hive.stats.fetch.column.stats']
+  hive_conf_dict['hive.stats.fetch.partition.stats'] = str(params.config['configurations']['hive-site']['hive.stats.fetch.partition.stats']).lower()
+  hive_conf_dict['hive.support.concurrency'] = str(params.config['configurations']['hive-site']['hive.support.concurrency']).lower()
+  hive_conf_dict['hive.txn.manager'] = params.config['configurations']['hive-site']['hive.txn.manager']
+  hive_conf_dict['hive.txn.max.open.batch'] = params.config['configurations']['hive-site']['hive.txn.max.open.batch']
+  hive_conf_dict['hive.txn.timeout'] = params.config['configurations']['hive-site']['hive.txn.timeout']
+  hive_conf_dict['hive.vectorized.execution.enabled'] = str(params.config['configurations']['hive-site']['hive.vectorized.execution.enabled']).lower()
+  hive_conf_dict['hive.vectorized.execution.reduce.enabled'] = str(params.config['configurations']['hive-site']['hive.vectorized.execution.reduce.enabled']).lower()
+  hive_conf_dict['hive.vectorized.groupby.checkinterval'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.checkinterval']
+  hive_conf_dict['hive.vectorized.groupby.flush.percent'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.flush.percent']
+  hive_conf_dict['hive.vectorized.groupby.maxentries'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.maxentries']
+  hive_conf_dict['hive.zookeeper.client.port'] = params.config['configurations']['hive-site']['hive.zookeeper.client.port']
+  hive_conf_dict['hive.zookeeper.namespace'] = params.config['configurations']['hive-site']['hive.zookeeper.namespace']
+  hive_conf_dict['hive.zookeeper.quorum'] = params.config['configurations']['hive-site']['hive.zookeeper.quorum']
+  hive_conf_dict['javax.jdo.option.ConnectionDriverName'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
+  hive_conf_dict['javax.jdo.option.ConnectionURL'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+  hive_conf_dict['javax.jdo.option.ConnectionUserName'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+
+  # The following parameters are not present in the configurations dictionary,
+  # so they are left commented out:
+  # hive_conf_dict['hive.prewarm.enabled'] = params.config['configurations']['hive-site']['hive.prewarm.enabled']
+  # hive_conf_dict['hive.prewarm.numcontainers'] = params.config['configurations']['hive-site']['hive.prewarm.numcontainers']
+  # hive_conf_dict['hive.user.install.directory'] = params.config['configurations']['hive-site']['hive.user.install.directory']
+
+  if params.security_enabled:
+    hive_conf_dict['hive.metastore.sasl.enabled'] =  str(params.config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower()
+    hive_conf_dict['hive.metastore.kerberos.keytab.file'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+    hive_conf_dict['hive.server2.authentication.spnego.principal'] =  params.config['configurations']['hive-site']['hive.server2.authentication.spnego.principal']
+    hive_conf_dict['hive.server2.authentication.spnego.keytab'] = params.config['configurations']['hive-site']['hive.server2.authentication.spnego.keytab']
+    hive_conf_dict['hive.metastore.kerberos.principal'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal']
+    hive_conf_dict['hive.server2.authentication.kerberos.principal'] = params.config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
+    hive_conf_dict['hive.server2.authentication.kerberos.keytab'] =  params.config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
+    hive_conf_dict['hive.security.authorization.enabled']=  str(params.config['configurations']['hive-site']['hive.security.authorization.enabled']).lower()
+    hive_conf_dict['hive.server2.enable.doAs'] =  str(params.config['configurations']['hive-site']['hive.server2.enable.doAs']).lower()
+
+  # hive.server2.use.SSL was normalized to the string 'true'/'false' above, so
+  # compare against 'true' explicitly (a non-empty 'false' string is truthy).
+  if hive_conf_dict['hive.server2.use.SSL'] == 'true':
+    if params.config['configurations']['hive-site']['hive.server2.keystore.path'] is not None:
+      hive_conf_dict['hive.server2.keystore.path']= str(params.config['configurations']['hive-site']['hive.server2.keystore.path'])
+    if params.config['configurations']['hive-site']['hive.server2.keystore.password'] is not None:
+      hive_conf_dict['hive.server2.keystore.password']= str(params.config['configurations']['hive-site']['hive.server2.keystore.password'])
+
+  # convert remaining numbers to strings
+  for key, value in hive_conf_dict.iteritems():
+    hive_conf_dict[key] = str(value)
+
+  return hive_conf_dict
+
+
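+# Builds the key/value map rendered into spark-defaults.conf: the raw
+# spark-defaults configuration plus the YARN, history server and event log
+# settings computed in params.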
+def spark_properties(params):
+  spark_dict = dict()
+
+  all_spark_config = params.config['configurations']['spark-defaults']
+  # Add all configs unfiltered first so custom (user-added) properties are kept.
+  spark_dict = all_spark_config.copy()
+
+  spark_dict['spark.yarn.executor.memoryOverhead'] = params.spark_yarn_executor_memoryOverhead
+  spark_dict['spark.yarn.driver.memoryOverhead'] = params.spark_yarn_driver_memoryOverhead
+  spark_dict['spark.yarn.applicationMaster.waitTries'] = params.spark_yarn_applicationMaster_waitTries
+  spark_dict['spark.yarn.scheduler.heartbeat.interval-ms'] = params.spark_yarn_scheduler_heartbeat_interval
+  spark_dict['spark.yarn.max_executor.failures'] = params.spark_yarn_max_executor_failures
+  spark_dict['spark.yarn.queue'] = params.spark_yarn_queue
+  spark_dict['spark.yarn.containerLauncherMaxThreads'] = params.spark_yarn_containerLauncherMaxThreads
+  spark_dict['spark.yarn.submit.file.replication'] = params.spark_yarn_submit_file_replication
+  spark_dict['spark.yarn.preserve.staging.files'] = params.spark_yarn_preserve_staging_files
+
+  # Hardcoded parameters to be added to spark-defaults.conf
+  spark_dict['spark.yarn.historyServer.address'] = params.spark_history_server_host + ':' + str(
+    params.spark_history_ui_port)
+  spark_dict['spark.history.ui.port'] = params.spark_history_ui_port
+  spark_dict['spark.eventLog.enabled'] = str(params.spark_eventlog_enabled).lower()
+  spark_dict['spark.eventLog.dir'] = params.spark_eventlog_dir
+  spark_dict['spark.history.fs.logDirectory'] = params.spark_eventlog_dir
+  spark_dict['spark.yarn.jar'] = params.spark_yarn_jar
+
+  spark_dict['spark.driver.extraJavaOptions'] = params.spark_driver_extraJavaOptions
+  spark_dict['spark.yarn.am.extraJavaOptions'] = params.spark_yarn_am_extraJavaOptions
+
+  # convert remaining numbers to strings
+  for key, value in spark_dict.iteritems():
+    spark_dict[key] = str(value)
+
+  return spark_dict
+
+
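+# Helper for writing properties straight into a config file. Not currently
+# called from spark(): spark-defaults.conf is rendered from the
+# spark-defaults.conf.j2 template instead (see the commented-out calls above).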
+def write_properties_to_file(file_path, value):
+  for key in value:
+    modify_config(file_path, key, value[key])
+
+
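+# Scans filepath for an existing assignment of `variable`; rewrites the line if
+# the value differs and appends the setting to the file if the key is absent.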
+def modify_config(filepath, variable, setting):
+  var_found = False
+  already_set = False
+  V = str(variable)
+  S = str(setting)
+
+  if ' ' in S:
+    S = '%s' % S
+
+  for line in fileinput.input(filepath, inplace=1):
+    if not line.lstrip(' ').startswith('#') and '=' in line:
+      _infile_var = str(line.split('=')[0].rstrip(' '))
+      _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
+      if not var_found and _infile_var.rstrip(' ') == V:
+        var_found = True
+        if _infile_set.lstrip(' ') == S:
+          already_set = True
+        else:
+          line = "%s %s\n" % (V, S)
+
+    sys.stdout.write(line)
+
+  if not var_found:
+    with open(filepath, "a") as f:
+      f.write("%s \t %s\n" % (V, S))
+  elif already_set:
+    pass
+  else:
+    pass
+
+  return
+
+
+def create_file(file_path):
+  try:
+    conf_file = open(file_path, 'w')
+    conf_file.close()
+  except IOError:
+    print('Unable to create file: ' + file_path)
+    sys.exit(1)
+
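+# Renders a packaged Jinja template into the Spark conf directory.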
+def configFile(name, template_name=None):
+  import params
+
+  File(format("{spark_conf}/{name}"),
+       content=Template(template_name),
+       owner=params.spark_user,
+       group=params.user_group
+  )
+
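+# Determines the installed IOP version by running 'iop-select status
+# hadoop-client' and extracting the version string from its output.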
+def get_iop_version():
+  try:
+    command = 'iop-select status hadoop-client'
+    return_code, iop_output = shell.call(command, timeout=20)
+  except Exception, e:
+    Logger.error(str(e))
+    raise Fail('Unable to execute iop-select command to retrieve the version.')
+
+  if return_code != 0:
+    raise Fail(
+      'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
+
+  iop_version = re.sub('hadoop-client - ', '', iop_output)
+  match = re.match('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+', iop_version)
+
+  if match is None:
+    raise Fail('Failed to get extracted version')
+
+  return iop_version

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
new file mode 100755
index 0000000..2345c63
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_client.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+from spark import spark
+
+
+class SparkClient(Script):
+
+  def get_component_name(self):
+    return "spark-client"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("spark-client", params.version)
+      #Execute(format("stack-select set spark-client {version}"))
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+    spark(env)
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+
+if __name__ == "__main__":
+  SparkClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/status_params.py
new file mode 100755
index 0000000..de8c2d4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/status_params.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+
+spark_user = config['configurations']['spark-env']['spark_user']
+spark_group = config['configurations']['spark-env']['spark_group']
+user_group = config['configurations']['cluster-env']['user_group']
+
+if 'hive-env' in config['configurations']:
+    hive_user = config['configurations']['hive-env']['hive_user']
+else:
+    hive_user = "hive"
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
+spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
+if security_enabled:
+    spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
+else:
+    spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
new file mode 100755
index 0000000..39e15d3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
@@ -0,0 +1,125 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+import socket
+import os
+from resource_management import *
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+from resource_management.libraries.functions import Direction
+from spark import *
+
+
+class ThriftServer(Script):
+
+  def get_component_name(self):
+    return "spark-thriftserver"
+
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("spark-thriftserver", params.version)
+
+  def install(self, env):
+    self.install_packages(env)
+    import params
+    env.set_params(params)
+    self.configure(env)
+
+  def stop(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    self.configure(env)
+    daemon_cmd = format('{spark_thrift_server_stop}')
+    if params.security_enabled:
+      Execute(daemon_cmd,
+              user=params.hive_user,
+              environment={'JAVA_HOME': params.java_home}
+              )
+    else:
+      Execute(daemon_cmd,
+              user=params.spark_user,
+              environment={'JAVA_HOME': params.java_home}
+      )
+    if os.path.isfile(params.spark_thrift_server_pid_file):
+      os.remove(params.spark_thrift_server_pid_file)
+
+
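+  # Starts the Spark Thrift Server. When security is enabled the hive user is
+  # kinit-ed first; no_op_test keeps Execute from starting a second instance if
+  # a live pid file already exists.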
+  def start(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    # TODO this looks wrong, maybe just call spark(env)
+    self.configure(env)
+
+    if params.security_enabled:
+      hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+      hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
+      hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
+      Execute(hive_kinit_cmd, user=params.hive_user)
+
+    # FIXME! TODO! remove this after soft link bug is fixed:
+    #if not os.path.islink('/usr/iop/current/spark'):
+    #  iop_version = get_iop_version()
+    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
+    #  Execute(cmd)
+
+    daemon_cmd = format('{spark_thrift_server_start}')
+    no_op_test = format(
+      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
+    if (upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE) or params.security_enabled:
+      Execute(daemon_cmd,
+              user=params.hive_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+              )
+    else:
+      Execute(daemon_cmd,
+              user=params.spark_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+      )
+
+  def status(self, env):
+    import status_params
+
+    env.set_params(status_params)
+    pid_file = format("{spark_thrift_server_pid_file}")
+    # Check the Thrift Server pid file to determine whether the process is running
+    check_process_status(pid_file)
+
+  # Called from install(), start() and stop() to (re)write the Spark configuration
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+    spark(env)
+
+if __name__ == "__main__":
+  ThriftServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/templates/spark-defaults.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/templates/spark-defaults.conf.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/templates/spark-defaults.conf.j2
new file mode 100755
index 0000000..83c971d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/templates/spark-defaults.conf.j2
@@ -0,0 +1,43 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+
+{% for key, value in spark_conf_properties_map.iteritems() -%}
+  {{key}}       {{value}}
+{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/configuration/sqoop-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/configuration/sqoop-env.xml
new file mode 100755
index 0000000..eb9da7b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/configuration/sqoop-env.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <!-- sqoop-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the Jinja template for the sqoop-env.sh file</description>
+    <value>
+# Set Hadoop-specific environment variables here.
+
+#Set path to where bin/hadoop is available
+export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+#set the path to where bin/hbase is available
+export HBASE_HOME=${HBASE_HOME:-{{hbase_home}}}
+
+#Set the path to where bin/hive is available
+export HIVE_HOME=${HIVE_HOME:-{{hive_home}}}
+
+#Set the path for where the zookeeper config dir is
+export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
+
+# add libthrift in hive to sqoop class path first so hive imports work
+export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
+    </value>
+  </property>
+  <property>
+    <name>sqoop_user</name>
+    <description>User to run Sqoop as</description>
+    <property-type>USER</property-type>
+    <value>sqoop</value>
+  </property>
+  <property>
+    <name>jdbc_drivers</name>
+    <description>Comma-separated list of additional JDBC driver class names</description>
+    <value> </value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/metainfo.xml
new file mode 100755
index 0000000..f878000
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/metainfo.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SQOOP</name>
+      <displayName>Sqoop</displayName>
+      <comment>Tool for transferring bulk data between Apache Hadoop and
+        structured data stores such as relational databases
+      </comment>
+      <version>1.4.6</version>
+
+      <components>
+        <component>
+          <name>SQOOP</name>
+          <displayName>Sqoop</displayName>
+          <category>CLIENT</category>
+          <cardinality>1+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/sqoop_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>sqoop-env.sh</fileName>
+              <dictionaryName>sqoop-env</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+      </components>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>sqoop</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>HDFS</service>
+      </requiredServices>
+
+      <configuration-dependencies>
+        <config-type>sqoop-env</config-type>
+      </configuration-dependencies>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/__init__.py
new file mode 100755
index 0000000..5561e10
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/params.py
new file mode 100755
index 0000000..eafee9e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/params.py
@@ -0,0 +1,95 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from resource_management import *
+
+# a map of the Ambari role to the component name
+# for use with /usr/iop/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'SQOOP' : 'sqoop-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SQOOP")
+
+config = Script.get_config()
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+stack_name = default("/hostLevelParams/stack_name", None)
+
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+stack_version = format_stack_version(stack_version_unformatted)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+#hadoop params
+sqoop_conf_dir = "/usr/iop/current/sqoop-client/conf"
+sqoop_lib = '/usr/iop/current/sqoop-client/lib'
+hadoop_home = '/usr/iop/current/hadoop-client'
+hbase_home = '/usr/iop/current/hbase-client'
+hive_home = '/usr/iop/current/hive-client'
+sqoop_bin_dir = '/usr/iop/current/sqoop-client/bin/'
+
+zoo_conf_dir = "/usr/iop/current/zookeeper-client/conf"
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+user_group = config['configurations']['cluster-env']['user_group']
+sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
+
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
+
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+kinit_path_local = functions.get_kinit_path()
+#JDBC driver jar name
+sqoop_jdbc_drivers_dict = {}
+sqoop_jdbc_drivers_name_dict = {}
+if "jdbc_drivers" in config['configurations']['sqoop-env']:
+  sqoop_jdbc_drivers = config['configurations']['sqoop-env']['jdbc_drivers'].split(',')
+
+  for driver_name in sqoop_jdbc_drivers:
+    driver_name = driver_name.strip()
+    if driver_name:
+      if driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
+        jdbc_jar_name = "sqljdbc4.jar"
+        jdbc_symlink_name = "mssql-jdbc-driver.jar"
+        jdbc_driver_name = "mssql"
+      elif driver_name == "com.mysql.jdbc.Driver":
+        jdbc_jar_name = "mysql-connector-java.jar"
+        jdbc_symlink_name = "mysql-jdbc-driver.jar"
+        jdbc_driver_name = "mysql"
+      elif driver_name == "org.postgresql.Driver":
+        jdbc_jar_name = "postgresql-jdbc.jar"
+        jdbc_symlink_name = "postgres-jdbc-driver.jar"
+        jdbc_driver_name = "postgres"
+      elif driver_name == "oracle.jdbc.driver.OracleDriver":
+        jdbc_jar_name = "ojdbc.jar"
+        jdbc_symlink_name = "oracle-jdbc-driver.jar"
+        jdbc_driver_name = "oracle"
+      elif driver_name == "org.hsqldb.jdbc.JDBCDriver":
+        jdbc_jar_name = "hsqldb.jar"
+        jdbc_symlink_name = "hsqldb-jdbc-driver.jar"
+        jdbc_driver_name = "hsqldb"
+    else:
+      continue
+    sqoop_jdbc_drivers_dict[jdbc_jar_name] = jdbc_symlink_name
+    sqoop_jdbc_drivers_name_dict[jdbc_jar_name] = jdbc_driver_name
+jdk_location = config['hostLevelParams']['jdk_location']

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
new file mode 100755
index 0000000..b1f658d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/service_check.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+
+from resource_management import *
+
+
+class SqoopServiceCheck(Script):
+
+  def get_component_name(self):
+    return "sqoop-server"
+
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    if params.security_enabled:
+      Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} {smokeuser_principal}"),
+              user = params.smokeuser,
+      )
+    Execute("sqoop version",
+            user = params.smokeuser,
+            path = params.sqoop_bin_dir,
+            logoutput = True
+    )
+
+if __name__ == "__main__":
+  SqoopServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop.py
new file mode 100755
index 0000000..79fe2af
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop.py
@@ -0,0 +1,84 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.core.source import InlineTemplate, DownloadSource
+from resource_management.libraries.functions import format
+from resource_management.core.resources.system import File, Link, Directory
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from ambari_commons import OSConst
+import os
+
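+# Sets up the Sqoop client: links the bundled MySQL connector, downloads any
+# additional JDBC drivers, creates the conf directory and renders sqoop-env.sh.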
+def sqoop(type=None):
+  import params
+  Link(params.sqoop_lib + "/mysql-connector-java.jar",
+       to = '/usr/share/java/mysql-connector-java.jar'
+  )
+
+  jdbc_connector()
+
+  Directory(params.sqoop_conf_dir,
+            owner = params.sqoop_user,
+            group = params.user_group,
+            create_parents = True
+  )
+  File(format("{sqoop_conf_dir}/sqoop-env.sh"),
+    owner=params.sqoop_user,
+    group = params.user_group,
+    content=InlineTemplate(params.sqoop_env_sh_template)
+  )
+  update_config_permissions(["sqoop-env-template.sh",
+                             "sqoop-site-template.xml",
+                             "sqoop-site.xml"])
+  pass
+
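+# Re-asserts ownership and group on any of the named config files that already
+# exist in the Sqoop conf directory (only_if guards missing files).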
+def update_config_permissions(names):
+  import params
+  for filename in names:
+    full_filename = os.path.join(params.sqoop_conf_dir, filename)
+    File(full_filename,
+         owner = params.sqoop_user,
+         group = params.user_group,
+         only_if = format("test -e {full_filename}")
+    )
+
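+# Downloads each additional JDBC driver jar from the Ambari server resources
+# (jdk_location) into the Sqoop lib directory; if the download fails, the error
+# points at 'ambari-server setup --jdbc-db=... --jdbc-driver=...'.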
+def jdbc_connector():
+  import params
+  from urllib2 import HTTPError
+  from resource_management import Fail
+  for jar_name in params.sqoop_jdbc_drivers_dict:
+    if 'mysql-connector-java.jar' in jar_name:
+      continue
+    downloaded_custom_connector = format("{sqoop_lib}/{jar_name}")
+    jdbc_symlink_remote = params.sqoop_jdbc_drivers_dict[jar_name]
+    jdbc_driver_label = params.sqoop_jdbc_drivers_name_dict[jar_name]
+    driver_curl_source = format("{jdk_location}/{jdbc_symlink_remote}")
+    environment = {
+      "no_proxy": format("{ambari_server_hostname}")
+    }
+    try:
+      File(downloaded_custom_connector,
+           content = DownloadSource(driver_curl_source),
+           mode = 0644,
+      )
+    except HTTPError:
+      error_string = format("Could not download {driver_curl_source}\n\
+                 Please upload jdbc driver to server by run command:\n\
+                 ambari-server setup --jdbc-db={jdbc_driver_label} --jdbc-driver=<PATH TO DRIVER>\n\
+                 at {ambari_server_hostname}")
+      raise Fail(error_string)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
new file mode 100755
index 0000000..56b7135
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SQOOP/package/scripts/sqoop_client.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
+
+from sqoop import sqoop
+
+
+class SqoopClient(Script):
+
+  def get_component_name(self):
+    return "sqoop-client"
+
+  def pre_rolling_restart(self, env):
+    import params
+    env.set_params(params)
+
+    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
+      conf_select.select(params.stack_name, "sqoop", params.version)
+      stack_select.select("sqoop-client", params.version)
+      #Execute(format("stack-select set sqoop-client {version}"))
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    sqoop(type='client')
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+  SqoopClient().execute()