Posted to commits@ambari.apache.org by ao...@apache.org on 2015/09/27 00:16:06 UTC

[3/3] ambari git commit: AMBARI-13252. RU: Spark service check failed multiple times - non root server, agent U14 (aonishuk)

AMBARI-13252. RU: Spark service check failed multiple times - non root server, agent U14 (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5eba9cfa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5eba9cfa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5eba9cfa

Branch: refs/heads/branch-2.1.2
Commit: 5eba9cfae57287a199b843623df49451ee9ec57f
Parents: e2e9a65
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Sun Sep 27 01:15:55 2015 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Sun Sep 27 01:15:55 2015 +0300

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/setup_spark.py               | 4 +++-
 .../src/test/python/stacks/2.2/SPARK/test_job_history_server.py  | 4 ++++
 .../src/test/python/stacks/2.2/SPARK/test_spark_client.py        | 4 ++++
 .../src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py | 2 ++
 4 files changed, 13 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5eba9cfa/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 1044e6b..9969a9b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -47,7 +47,9 @@ def setup_spark(env, type, action = None):
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],
-    key_value_delimiter = " ",               
+    key_value_delimiter = " ", 
+    owner=params.spark_user,
+    group=params.spark_group,              
   )
 
   # create spark-env.sh in etc/conf dir

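For reference, the full resource call in setup_spark.py after this hunk is applied reads roughly as below. This is a sketch assembled from the diff context above, not new code; params.spark_user and params.spark_group are assumed to resolve, as elsewhere in this stack's params module, to the configured Spark service account and group. Setting owner/group on the PropertiesFile resource makes the agent chown the rendered spark-defaults.conf to that account, which is what the non-root server/agent scenario in the bug title needs.

  # setup_spark.py (after this patch) -- sketch of the resulting call
  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
    properties = params.config['configurations']['spark-defaults'],
    key_value_delimiter = " ",
    owner = params.spark_user,
    group = params.spark_group,
  )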
http://git-wip-us.apache.org/repos/asf/ambari/blob/5eba9cfa/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 9cf1bd1..cb03a76 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -188,7 +188,9 @@ class TestJobHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
@@ -251,7 +253,9 @@ class TestJobHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/5eba9cfa/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index deec6d6..043d3bd 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -60,7 +60,9 @@ class TestSparkClient(RMFTestCase):
         recursive = True,
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
@@ -96,7 +98,9 @@ class TestSparkClient(RMFTestCase):
         recursive = True,
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/5eba9cfa/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index a0b80f0..f5f12f2 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -116,7 +116,9 @@ class TestSparkThriftServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',