Posted to commits@ambari.apache.org by jo...@apache.org on 2017/05/03 16:47:23 UTC

[40/53] [abbrv] ambari git commit: AMBARI-20910. HDP 3.0 TP - Unable to install Spark, cannot find package/scripts dir (alejandro)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_livy.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_livy.py
deleted file mode 100644
index adaca87..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_livy.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-from resource_management import Directory, File, PropertiesFile, InlineTemplate, format
-
-
-def setup_livy(env, type, upgrade_type = None, action = None):
-  import params
-
-  Directory([params.livy_pid_dir, params.livy_log_dir],
-            owner=params.livy_user,
-            group=params.user_group,
-            mode=0775,
-            create_parents = True
-  )
-  if type == 'server' and action == 'config':
-    params.HdfsResource(params.livy_hdfs_user_dir,
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.livy_user,
-                        mode=0775
-    )
-    params.HdfsResource(None, action="execute")
-
-    params.HdfsResource(params.livy_recovery_dir,
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.livy_user,
-                        mode=0700
-       )
-    params.HdfsResource(None, action="execute")
-
-  # create livy-env.sh in etc/conf dir
-  File(os.path.join(params.livy_conf, 'livy-env.sh'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=InlineTemplate(params.livy_env_sh),
-       mode=0644,
-  )
-
-  # create livy.conf in etc/conf dir
-  PropertiesFile(format("{livy_conf}/livy.conf"),
-                properties = params.config['configurations']['livy-conf'],
-                key_value_delimiter = " ",
-                owner=params.livy_user,
-                group=params.livy_group,
-  )
-
-  # create log4j.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'log4j.properties'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_log4j_properties,
-       mode=0644,
-  )
-
-  # create spark-blacklist.properties in etc/conf dir
-  File(os.path.join(params.livy_conf, 'spark-blacklist.conf'),
-       owner=params.livy_user,
-       group=params.livy_group,
-       content=params.livy_spark_blacklist_properties,
-       mode=0644,
-  )
-
-  Directory(params.livy_logs_dir,
-            owner=params.livy_user,
-            group=params.livy_group,
-            mode=0755,
-  )
-
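
For context, the deleted setup_livy.py is written against Ambari's
resource_management DSL: each Directory/File/PropertiesFile above is a
declarative resource with an owner, group and mode, applied by the agent
inside an Environment context. A minimal sketch of the same idiom, using
only calls visible in the diff (paths, users and property values are
hypothetical placeholders):

    # Sketch only -- assumes it runs inside an Ambari command, where an
    # Environment context is active. All literals here are hypothetical.
    import os
    from resource_management import Directory, File, PropertiesFile, InlineTemplate

    conf_dir = '/etc/livy/conf'                     # hypothetical path
    Directory(['/var/run/livy', '/var/log/livy'],
              owner='livy', group='hadoop', mode=0775,
              create_parents=True)                  # behaves like mkdir -p

    # Templated file; InlineTemplate resolves {{...}} placeholders against
    # the script's params at run time.
    File(os.path.join(conf_dir, 'livy-env.sh'),
         owner='livy', group='livy',
         content=InlineTemplate('export LIVY_LOG_DIR={{livy_log_dir}}'),
         mode=0644)

    # Key/value config rendered from a plain dict.
    PropertiesFile(os.path.join(conf_dir, 'livy.conf'),
                   properties={'livy.server.port': '8999'},
                   key_value_delimiter=' ',
                   owner='livy', group='livy')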

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_spark.py
deleted file mode 100644
index 9329ce0..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/setup_spark.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import fileinput
-import shutil
-import os
-
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from resource_management.core.source import InlineTemplate
-from resource_management.core.resources.system import Directory, File
-from resource_management.libraries.resources.properties_file import PropertiesFile
-from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.constants import StackFeature
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.resources.xml_config import XmlConfig
-
-def setup_spark(env, type, upgrade_type = None, action = None):
-  import params
-
-  Directory([params.spark_pid_dir, params.spark_log_dir],
-            owner=params.spark_user,
-            group=params.user_group,
-            mode=0775,
-            create_parents = True
-  )
-  if type == 'server' and action == 'config':
-    params.HdfsResource(params.spark_hdfs_user_dir,
-                       type="directory",
-                       action="create_on_execute",
-                       owner=params.spark_user,
-                       mode=0775
-    )
-    params.HdfsResource(None, action="execute")
-
-  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
-    properties = params.config['configurations']['spark-defaults'],
-    key_value_delimiter = " ",
-    owner=params.spark_user,
-    group=params.spark_group,
-    mode=0644
-  )
-
-  # create spark-env.sh in etc/conf dir
-  File(os.path.join(params.spark_conf, 'spark-env.sh'),
-       owner=params.spark_user,
-       group=params.spark_group,
-       content=InlineTemplate(params.spark_env_sh),
-       mode=0644,
-  )
-
-  #create log4j.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'log4j.properties'),
-       owner=params.spark_user,
-       group=params.spark_group,
-       content=params.spark_log4j_properties,
-       mode=0644,
-  )
-
-  #create metrics.properties in etc/conf dir
-  File(os.path.join(params.spark_conf, 'metrics.properties'),
-       owner=params.spark_user,
-       group=params.spark_group,
-       content=InlineTemplate(params.spark_metrics_properties),
-       mode=0644
-  )
-
-  if params.is_hive_installed:
-    XmlConfig("hive-site.xml",
-          conf_dir=params.spark_conf,
-          configurations=params.spark_hive_properties,
-          owner=params.spark_user,
-          group=params.spark_group,
-          mode=0644)
-
-  if params.has_spark_thriftserver:
-    PropertiesFile(params.spark_thrift_server_conf_file,
-      properties = params.config['configurations']['spark-thrift-sparkconf'],
-      owner = params.hive_user,
-      group = params.user_group,
-      key_value_delimiter = " ",
-      mode=0644
-    )
-
-  effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
-  if effective_version:
-    effective_version = format_stack_version(effective_version)
-
-  if params.spark_thrift_fairscheduler_content and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
-    # create spark-thrift-fairscheduler.xml
-    File(os.path.join(params.spark_conf,"spark-thrift-fairscheduler.xml"),
-      owner=params.spark_user,
-      group=params.spark_group,
-      mode=0755,
-      content=InlineTemplate(params.spark_thrift_fairscheduler_content)
-    )
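
The tail of setup_spark() above shows Ambari's stack-feature gate: instead
of comparing raw version strings, the script normalizes the version and
asks whether the running stack declares a named feature. The core of that
pattern in isolation (the version literal is a hypothetical example):

    # Stack-feature gate as used above; '2.6.0.0' is a hypothetical version.
    from resource_management.libraries.functions.version import format_stack_version
    from resource_management.libraries.functions.stack_features import check_stack_feature
    from resource_management.libraries.functions.constants import StackFeature

    # During an upgrade the target version is used, otherwise the current
    # formatted stack version -- mirroring the effective_version logic above.
    effective_version = format_stack_version('2.6.0.0')
    if effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
        pass  # render spark-thrift-fairscheduler.xml, as in the diff above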

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_client.py
deleted file mode 100644
index 3acde4e..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_client.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.constants import StackFeature
-from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from setup_spark import setup_spark
-
-
-class SparkClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env, upgrade_type=None, config_dir=None):
-    import params
-    env.set_params(params)
-    
-    setup_spark(env, 'client', upgrade_type=upgrade_type, action = 'config')
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-  
-  def get_component_name(self):
-    # TODO, change to "spark" after RPM switches the name
-    return "spark2-client"
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      Logger.info("Executing Spark Client Stack Upgrade pre-restart")
-      # TODO, change to "spark" after RPM switches the name
-      conf_select.select(params.stack_name, "spark2", params.version)
-      stack_select.select("spark2-client", params.version)
-
-if __name__ == "__main__":
-  SparkClient().execute()
-
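
The deleted spark_client.py follows Ambari's standard client-component
shape: a Script subclass whose install() delegates to install_packages()
and then configure(), and whose status() raises ClientComponentHasNoStatus
because a client has no daemon to probe. That skeleton, with hypothetical
names standing in for SparkClient/setup_spark:

    # Client-component skeleton; MyClient and the commented-out setup call
    # are hypothetical stand-ins. Runs under the Ambari agent, which
    # provides the params module.
    from resource_management.libraries.script.script import Script
    from resource_management.core.exceptions import ClientComponentHasNoStatus

    class MyClient(Script):
      def install(self, env):
        self.install_packages(env)    # OS packages first
        self.configure(env)           # then config files

      def configure(self, env, upgrade_type=None, config_dir=None):
        import params                 # evaluated lazily, per Ambari convention
        env.set_params(params)
        # setup_my_client(env, 'client', action='config')

      def status(self, env):
        raise ClientComponentHasNoStatus()   # clients are not daemons

    if __name__ == "__main__":
      MyClient().execute()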

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_service.py
deleted file mode 100644
index 536d798..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_service.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-import socket
-import tarfile
-import os
-from contextlib import closing
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions.copy_tarball import copy_to_hdfs, get_tarball_paths
-from resource_management.libraries.functions import format
-from resource_management.core.resources.system import File, Execute
-from resource_management.libraries.functions.version import format_stack_version
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.constants import StackFeature
-from resource_management.libraries.functions.show_logs import show_logs
-
-
-def make_tarfile(output_filename, source_dir):
-  try:
-    os.remove(output_filename)
-  except OSError:
-    pass
-  parent_dir=os.path.dirname(output_filename)
-  if not os.path.exists(parent_dir):
-    os.makedirs(parent_dir)
-  os.chmod(parent_dir, 0711)
-  with closing(tarfile.open(output_filename, "w:gz")) as tar:
-    for file in os.listdir(source_dir):
-      tar.add(os.path.join(source_dir,file),arcname=file)
-  os.chmod(output_filename, 0644)
-
-
-def spark_service(name, upgrade_type=None, action=None):
-  import params
-
-  if action == 'start':
-
-    effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
-    if effective_version:
-      effective_version = format_stack_version(effective_version)
-
-    if name == 'jobhistoryserver' and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
-      # TODO, change to "spark" after RPM switches the name
-      # create & copy spark2-hdp-yarn-archive.tar.gz to hdfs
-      if not params.sysprep_skip_copy_tarballs_hdfs:
-          source_dir=params.spark_home+"/jars"
-          tmp_archive_file=get_tarball_paths("spark2")[1]
-          make_tarfile(tmp_archive_file, source_dir)
-          copy_to_hdfs("spark2", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs, replace_existing_files=True)
-      # create spark history directory
-      params.HdfsResource(params.spark_history_dir,
-                          type="directory",
-                          action="create_on_execute",
-                          owner=params.spark_user,
-                          group=params.user_group,
-                          mode=0777,
-                          recursive_chmod=True
-                          )
-      params.HdfsResource(None, action="execute")
-
-    if params.security_enabled:
-      spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
-      Execute(spark_kinit_cmd, user=params.spark_user)
-
-    # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
-    # need to copy the tarball, otherwise, copy it.
-    if params.stack_version_formatted and check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
-      resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
-      if resource_created:
-        params.HdfsResource(None, action="execute")
-
-    if name == 'jobhistoryserver':
-      historyserver_no_op_test = format(
-      'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')
-      try:
-        Execute(format('{spark_history_server_start}'),
-                user=params.spark_user,
-                environment={'JAVA_HOME': params.java_home},
-                not_if=historyserver_no_op_test)
-      except:
-        show_logs(params.spark_log_dir, user=params.spark_user)
-        raise
-
-    elif name == 'sparkthriftserver':
-      if params.security_enabled:
-        hive_principal = params.hive_kerberos_principal
-        hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
-        Execute(hive_kinit_cmd, user=params.hive_user)
-
-      thriftserver_no_op_test = format(
-      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
-      try:
-        Execute(format('{spark_thrift_server_start} --properties-file {spark_thrift_server_conf_file} {spark_thrift_cmd_opts_properties}'),
-                user=params.hive_user,
-                environment={'JAVA_HOME': params.java_home},
-                not_if=thriftserver_no_op_test
-        )
-      except:
-        show_logs(params.spark_log_dir, user=params.hive_user)
-        raise
-  elif action == 'stop':
-    if name == 'jobhistoryserver':
-      try:
-        Execute(format('{spark_history_server_stop}'),
-                user=params.spark_user,
-                environment={'JAVA_HOME': params.java_home}
-        )
-      except:
-        show_logs(params.spark_log_dir, user=params.spark_user)
-        raise
-      File(params.spark_history_server_pid_file,
-        action="delete"
-      )
-
-    elif name == 'sparkthriftserver':
-      try:
-        Execute(format('{spark_thrift_server_stop}'),
-                user=params.hive_user,
-                environment={'JAVA_HOME': params.java_home}
-        )
-      except:
-        show_logs(params.spark_log_dir, user=params.hive_user)
-        raise
-      File(params.spark_thrift_server_pid_file,
-        action="delete"
-      )
-
-
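
Every start branch in spark_service() above relies on the same idempotency
guard: Execute() is handed a not_if shell test that succeeds only if the
pid file exists and the recorded process is alive, so a repeated 'start'
becomes a no-op instead of spawning a second daemon. The guard in
isolation (pid file, start script and JAVA_HOME are hypothetical):

    # not_if guard pattern from spark_service(); all literals hypothetical.
    from resource_management.core.resources.system import Execute

    pid_file = '/var/run/spark/spark-history-server.pid'
    no_op_test = ('ls %s >/dev/null 2>&1 && '
                  'ps -p `cat %s` >/dev/null 2>&1') % (pid_file, pid_file)

    # Execute() skips the command entirely when not_if exits 0.
    Execute('/usr/hdp/current/spark2-historyserver/sbin/start-history-server.sh',
            user='spark',
            environment={'JAVA_HOME': '/usr/jdk64/jdk1.8.0_112'},
            not_if=no_op_test)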

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_thrift_server.py
deleted file mode 100644
index 8953b35..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/spark_thrift_server.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import os
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
-from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.constants import StackFeature
-from resource_management.libraries.functions.check_process_status import check_process_status
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from setup_spark import setup_spark
-from spark_service import spark_service
-
-
-class SparkThriftServer(Script):
-
-  def install(self, env):
-    import params
-    env.set_params(params)
-
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None, config_dir=None):
-    import params
-    env.set_params(params)
-    setup_spark(env, 'server', upgrade_type = upgrade_type, action = 'config')
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    self.configure(env)
-    spark_service('sparkthriftserver', upgrade_type=upgrade_type, action='start')
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    spark_service('sparkthriftserver', upgrade_type=upgrade_type, action='stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.spark_thrift_server_pid_file)
-
-  def get_component_name(self):
-    # TODO, change to "spark" after RPM switches the name
-    return "spark2-thriftserver"
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    Logger.info("Executing Spark Thrift Server Stack Upgrade pre-restart")
-    # TODO, change to "spark" after RPM switches the name
-    conf_select.select(params.stack_name, "spark2", params.version)
-    stack_select.select("spark2-thriftserver", params.version)
-      
-  def get_log_folder(self):
-    import params
-    return params.spark_log_dir
-  
-  def get_user(self):
-    import params
-    return params.hive_user
-
-  def get_pid_files(self):
-    import status_params
-    return [status_params.spark_thrift_server_pid_file]
-
-if __name__ == "__main__":
-  SparkThriftServer().execute()
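
pre_upgrade_restart() above is the hook Ambari invokes before restarting a
component during a stack upgrade: conf_select.select() repoints the
component's conf symlink at the new version's configuration directory, and
stack_select.select() repoints the /usr/hdp/current binary link. Reduced
to the bare pattern (stack name and version are hypothetical literals):

    # Bare pre-restart upgrade hook, as in SparkThriftServer above;
    # 'HDP' and the version string are hypothetical examples.
    from resource_management.libraries.functions import conf_select, stack_select

    def pre_upgrade_restart(env, upgrade_type=None):
      stack_name, version = 'HDP', '2.6.0.0-1234'
      conf_select.select(stack_name, "spark2", version)    # conf symlinks
      stack_select.select("spark2-thriftserver", version)  # binary symlinks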

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b588a92/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/status_params.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/status_params.py
deleted file mode 100644
index 07dcc47..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/status_params.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.default import default
-
-config = Script.get_config()
-
-spark_user = config['configurations']['spark-env']['spark_user']
-spark_group = config['configurations']['spark-env']['spark_group']
-user_group = config['configurations']['cluster-env']['user_group']
-
-if 'hive-env' in config['configurations']:
-  hive_user = config['configurations']['hive-env']['hive_user']
-else:
-  hive_user = "hive"
-
-spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
-spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
-spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")
-stack_name = default("/hostLevelParams/stack_name", None)
-
-if "livy-env" in config['configurations']:
-  livy_user = config['configurations']['livy-env']['livy_user']
-  livy_group = config['configurations']['livy-env']['livy_group']
-  livy_pid_dir = config['configurations']['livy-env']['livy_pid_dir']
-  livy_server_pid_file = format("{livy_pid_dir}/livy-{livy_user}-server.pid")
\ No newline at end of file