Posted to commits@ambari.apache.org by di...@apache.org on 2017/07/13 17:45:26 UTC

ambari git commit: AMBARI-21461 Spark thrift server restart fails during migration EU (dili)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 65e57a12b -> 33c279cf2


AMBARI-21461 Spark thrift server restart fails during migration EU (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33c279cf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33c279cf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33c279cf

Branch: refs/heads/branch-2.5
Commit: 33c279cf272e824d1efdea1aec7bba667b618596
Parents: 65e57a1
Author: Di Li <di...@apache.org>
Authored: Thu Jul 13 13:42:33 2017 -0400
Committer: Di Li <di...@apache.org>
Committed: Thu Jul 13 13:42:33 2017 -0400

----------------------------------------------------------------------
 .../BigInsights/4.0/services/SPARK/metainfo.xml |   2 +-
 .../package/scripts/spark_thrift_server.py      | 125 +++++++++++++++++++
 .../SPARK/package/scripts/thrift_server.py      | 125 -------------------
 .../BigInsights/4.2/services/SPARK/metainfo.xml |   2 +-
 .../package/scripts/spark_thrift_server.py      | 119 ++++++++++++++++++
 .../SPARK/package/scripts/thrift_server.py      | 119 ------------------
 6 files changed, 246 insertions(+), 246 deletions(-)
----------------------------------------------------------------------
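
The insertion and deletion counts pair up exactly: the change is a straight
rename of thrift_server.py to spark_thrift_server.py in both the 4.0 and 4.2
stacks, plus repointing each metainfo.xml at the new name. With rename
detection enabled, git shows this directly (assuming a local checkout that
contains this commit):

  git show -M --stat 33c279cf272e824d1efdea1aec7bba667b618596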


http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/metainfo.xml
index 02abc62..ef89698 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/metainfo.xml
@@ -83,7 +83,7 @@
             </dependency>
           </dependencies>
           <commandScript>
-            <script>scripts/thrift_server.py</script>
+            <script>scripts/spark_thrift_server.py</script>
             <scriptType>PYTHON</scriptType>
             <timeout>600</timeout>
           </commandScript>

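The <commandScript> element above is what Ambari resolves, relative to the
service's package directory, when it runs a lifecycle command for the Spark
thrift server component. A minimal sanity-check sketch for the repointed
script, assuming the stock stack layout under
/var/lib/ambari-server/resources/stacks (the path below is illustrative):

  import os

  # hypothetical location of the BigInsights 4.0 SPARK service package
  pkg = "/var/lib/ambari-server/resources/stacks/BigInsights/4.0/services/SPARK/package"
  script = os.path.join(pkg, "scripts", "spark_thrift_server.py")
  if not os.path.isfile(script):
      raise RuntimeError("commandScript target missing: %s" % script)
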
http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
new file mode 100755
index 0000000..39e15d3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/spark_thrift_server.py
@@ -0,0 +1,125 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+import socket
+import os
+from resource_management import *
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+from resource_management.libraries.functions import Direction
+from spark import *
+
+
+class ThriftServer(Script):
+
+  def get_component_name(self):
+    return "spark-thriftserver"
+
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("spark-thriftserver", params.version)
+
+  def install(self, env):
+    self.install_packages(env)
+    import params
+    env.set_params(params)
+    self.configure(env)
+
+  def stop(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    self.configure(env)
+    daemon_cmd = format('{spark_thrift_server_stop}')
+    if params.security_enabled:
+      Execute(daemon_cmd,
+              user=params.hive_user,
+              environment={'JAVA_HOME': params.java_home}
+              )
+    else:
+      Execute(daemon_cmd,
+              user=params.spark_user,
+              environment={'JAVA_HOME': params.java_home}
+      )
+    if os.path.isfile(params.spark_thrift_server_pid_file):
+      os.remove(params.spark_thrift_server_pid_file)
+
+
+  def start(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    # TODO this looks wrong, maybe just call spark(env)
+    self.configure(env)
+
+    if params.security_enabled:
+      hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+      hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
+      hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
+      Execute(hive_kinit_cmd, user=params.hive_user)
+
+    # FIXME! TODO! remove this after soft link bug is fixed:
+    #if not os.path.islink('/usr/iop/current/spark'):
+    #  iop_version = get_iop_version()
+    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
+    #  Execute(cmd)
+
+    daemon_cmd = format('{spark_thrift_server_start}')
+    no_op_test = format(
+      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
+    if (upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE) or params.security_enabled:
+      Execute(daemon_cmd,
+              user=params.hive_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+              )
+    else:
+      Execute(daemon_cmd,
+              user=params.spark_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+      )
+
+  def status(self, env):
+    import status_params
+
+    env.set_params(status_params)
+    pid_file = format("{spark_thrift_server_pid_file}")
+    # Check the Spark thrift server process recorded in the PID file
+    check_process_status(pid_file)
+
+  # Note: configure() is also invoked from install(), start() and stop()
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+    spark(env)
+
+if __name__ == "__main__":
+  ThriftServer().execute()

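The no_op_test guard in start() above encodes, in shell, "the PID file exists
and the recorded process is alive", so a repeated start becomes a no-op. A
rough Python equivalent of the same check, as a standalone sketch (the helper
name is made up, not part of this commit):

  import os

  def thrift_server_is_running(pid_file):
      # Mirrors: ls PID_FILE && ps -p `cat PID_FILE`
      if not os.path.isfile(pid_file):
          return False
      try:
          with open(pid_file) as f:
              pid = int(f.read().strip())
          os.kill(pid, 0)  # signal 0 probes for existence without signaling
          return True
      except (ValueError, OSError):
          return False
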
http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
deleted file mode 100755
index 39e15d3..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SPARK/package/scripts/thrift_server.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import socket
-import os
-from resource_management import *
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from resource_management.libraries.functions import Direction
-from spark import *
-
-
-class ThriftServer(Script):
-
-  def get_component_name(self):
-    return "spark-thriftserver"
-
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-thriftserver", params.version)
-
-  def install(self, env):
-    self.install_packages(env)
-    import params
-    env.set_params(params)
-    self.configure(env)
-
-  def stop(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    daemon_cmd = format('{spark_thrift_server_stop}')
-    if params.security_enabled:
-      Execute(daemon_cmd,
-              user=params.hive_user,
-              environment={'JAVA_HOME': params.java_home}
-              )
-    else:
-      Execute(daemon_cmd,
-              user=params.spark_user,
-              environment={'JAVA_HOME': params.java_home}
-      )
-    if os.path.isfile(params.spark_thrift_server_pid_file):
-      os.remove(params.spark_thrift_server_pid_file)
-
-
-  def start(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    # TODO this looks wrong, maybe just call spark(env)
-    self.configure(env)
-
-    if params.security_enabled:
-      hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-      hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
-      hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
-      Execute(hive_kinit_cmd, user=params.hive_user)
-
-    # FIXME! TODO! remove this after soft link bug is fixed:
-    #if not os.path.islink('/usr/iop/current/spark'):
-    #  iop_version = get_iop_version()
-    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
-    #  Execute(cmd)
-
-    daemon_cmd = format('{spark_thrift_server_start}')
-    no_op_test = format(
-      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
-    if (upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE) or params.security_enabled:
-      Execute(daemon_cmd,
-              user=params.hive_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-              )
-    else:
-      Execute(daemon_cmd,
-              user=params.spark_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-      )
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    pid_file = format("{spark_thrift_server_pid_file}")
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-  # Note: This function is not called from start()/install()
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    spark(env)
-
-if __name__ == "__main__":
-  ThriftServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
index 7b6bae1..819ffd4 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
@@ -83,7 +83,7 @@
             </dependency>
           </dependencies>
           <commandScript>
-            <script>scripts/thrift_server.py</script>
+            <script>scripts/spark_thrift_server.py</script>
             <scriptType>PYTHON</scriptType>
             <timeout>600</timeout>
           </commandScript>

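In the 4.2 script added below, start() builds its kinit command by reading the
Hive metastore principal from hive-site and substituting the local fully
qualified host name for the _HOST placeholder. That substitution in isolation,
with hypothetical values (the keytab path is a common convention, not taken
from this diff):

  import socket

  # hypothetical principal as it would appear in hive-site
  template = "hive/_HOST@EXAMPLE.COM"
  principal = template.replace("_HOST", socket.getfqdn().lower())
  # e.g. "hive/node1.example.com@EXAMPLE.COM"
  kinit_cmd = "kinit -kt /etc/security/keytabs/hive.service.keytab %s; " % principal
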
http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
new file mode 100755
index 0000000..a0226b5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+import socket
+import os
+from resource_management import *
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+from resource_management.libraries.functions import Direction
+from spark import *
+
+
+class ThriftServer(Script):
+
+  def get_component_name(self):
+    return "spark-thriftserver"
+
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("spark-thriftserver", params.version)
+
+  def install(self, env):
+    self.install_packages(env)
+    import params
+    env.set_params(params)
+    self.configure(env)
+
+  def stop(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    self.configure(env)
+    daemon_cmd = format('{spark_thrift_server_stop}')
+    Execute(daemon_cmd,
+            user=params.hive_user,
+            environment={'JAVA_HOME': params.java_home}
+    )
+    if os.path.isfile(params.spark_thrift_server_pid_file):
+      os.remove(params.spark_thrift_server_pid_file)
+
+
+  def start(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    # TODO this looks wrong, maybe just call spark(env)
+    self.configure(env)
+
+    if params.security_enabled:
+        hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+        hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
+        hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
+        Execute(hive_kinit_cmd, user=params.hive_user)
+
+    # FIXME! TODO! remove this after soft link bug is fixed:
+    #if not os.path.islink('/usr/iop/current/spark'):
+    #  iop_version = get_iop_version()
+    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
+    #  Execute(cmd)
+
+    daemon_cmd = format('{spark_thrift_server_start} --conf spark.ui.port={params.spark_thriftserver_ui_port}')
+    no_op_test = format(
+      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
+    if upgrade_type is not None and params.upgrade_direction == Direction.DOWNGRADE and not params.security_enabled:
+      Execute(daemon_cmd,
+              user=params.spark_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+              )
+    else:
+      Execute(daemon_cmd,
+              user=params.hive_user,
+              environment={'JAVA_HOME': params.java_home},
+              not_if=no_op_test
+      )
+
+  def status(self, env):
+    import status_params
+
+    env.set_params(status_params)
+    pid_file = format("{spark_thrift_server_pid_file}")
+    # Check the Spark thrift server process recorded in the PID file
+    check_process_status(pid_file)
+
+  # Note: configure() is also invoked from install(), start() and stop()
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+    spark(env)
+
+if __name__ == "__main__":
+  ThriftServer().execute()

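The 4.0 and 4.2 scripts pick the daemon's run user differently. In 4.0, an
UPGRADE-direction restart or a secured cluster runs as hive_user and everything
else as spark_user; in 4.2, only a non-secure DOWNGRADE keeps spark_user, and
every other case (including the migration EU restart this JIRA covers) runs as
hive_user. The 4.2 decision as a small pure function, for illustration only
(names and defaults are made up):

  def pick_run_user_4_2(upgrade_type, upgrade_direction, security_enabled,
                        spark_user="spark", hive_user="hive"):
      # Mirrors the 4.2 start(): non-secure downgrade -> spark_user,
      # everything else (upgrade restarts, secure clusters, plain starts)
      # -> hive_user
      if (upgrade_type is not None
              and upgrade_direction == "downgrade"
              and not security_enabled):
          return spark_user
      return hive_user
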
http://git-wip-us.apache.org/repos/asf/ambari/blob/33c279cf/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
deleted file mode 100755
index d1a8b67..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import socket
-import os
-from resource_management import *
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from resource_management.libraries.functions import Direction
-from spark import *
-
-
-class ThriftServer(Script):
-
-  def get_component_name(self):
-    return "spark-thriftserver"
-  
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-thriftserver", params.version)
-
-  def install(self, env):
-    self.install_packages(env)
-    import params
-    env.set_params(params)
-    self.configure(env)
-
-  def stop(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    daemon_cmd = format('{spark_thrift_server_stop}')
-    Execute(daemon_cmd,
-            user=params.hive_user,
-            environment={'JAVA_HOME': params.java_home}
-    )
-    if os.path.isfile(params.spark_thrift_server_pid_file):
-      os.remove(params.spark_thrift_server_pid_file)
-
-
-  def start(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    # TODO this looks wrong, maybe just call spark(env)
-    self.configure(env)
-
-    if params.security_enabled:
-        hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-        hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
-        hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
-        Execute(hive_kinit_cmd, user=params.hive_user)
-
-    # FIXME! TODO! remove this after soft link bug is fixed:
-    #if not os.path.islink('/usr/iop/current/spark'):
-    #  iop_version = get_iop_version()
-    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
-    #  Execute(cmd)
-
-    daemon_cmd = format('{spark_thrift_server_start} --conf spark.ui.port={params.spark_thriftserver_ui_port}')
-    no_op_test = format(
-      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
-    if upgrade_type is not None and params.upgrade_direction == Direction.DOWNGRADE and not params.security_enabled:
-      Execute(daemon_cmd,
-              user=params.spark_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-              )
-    else:
-      Execute(daemon_cmd,
-              user=params.hive_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-      )
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    pid_file = format("{spark_thrift_server_pid_file}")
-    # Recursively check all existing gmetad pid files
-    check_process_status(pid_file)
-
-  # Note: This function is not called from start()/install()
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    spark(env)
-
-if __name__ == "__main__":
-  ThriftServer().execute()
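
The ThriftServer().execute() entry point comes from the resource_management
Script base class: Ambari invokes the file with a lifecycle command (START,
STOP, STATUS, ...) on the command line, and execute() dispatches to the
matching lower-cased method. A stripped-down mock of that dispatch pattern,
not the real Script class:

  import sys

  class MiniScript(object):
      def start(self, env):
          print("starting")

      def stop(self, env):
          print("stopping")

      def execute(self):
          # the first CLI argument names the command, e.g. START
          command = sys.argv[1].lower()
          getattr(self, command)(env=None)

  if __name__ == "__main__":
      MiniScript().execute()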