Posted to commits@ambari.apache.org by nc...@apache.org on 2015/09/30 16:45:45 UTC

[25/50] [abbrv] ambari git commit: AMBARI-13252. RU: Spark service check failed multiple times - non root server, agent U14 (aonishuk)

AMBARI-13252. RU: Spark service check failed multiple times - non root server, agent U14 (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62b7fe87
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62b7fe87
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62b7fe87

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 62b7fe87fce2f4d052a358f156aa19ad5c260b8d
Parents: bb1491f
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Sun Sep 27 01:15:50 2015 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Sun Sep 27 01:15:50 2015 +0300

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/setup_spark.py               | 4 +++-
 .../src/test/python/stacks/2.2/SPARK/test_job_history_server.py  | 4 ++++
 .../src/test/python/stacks/2.2/SPARK/test_spark_client.py        | 4 ++++
 .../src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py | 2 ++
 4 files changed, 13 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/62b7fe87/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 1044e6b..9969a9b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -47,7 +47,9 @@ def setup_spark(env, type, action = None):
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],
-    key_value_delimiter = " ",               
+    key_value_delimiter = " ", 
+    owner=params.spark_user,
+    group=params.spark_group,              
   )
 
   # create spark-env.sh in etc/conf dir
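
For reference, the resource declaration in setup_spark.py after this change looks roughly like the sketch below. This is a minimal, abbreviated sketch only: the import paths assume the usual resource_management layout, the helper name write_spark_defaults and its arguments are made up for illustration, and the real setup_spark() does considerably more than shown here.

# Minimal illustrative sketch -- not the full setup_spark.py.
# Import paths assume the standard resource_management layout.
from resource_management.libraries.functions.format import format
from resource_management.libraries.resources.properties_file import PropertiesFile

def write_spark_defaults(params, spark_conf):
  # spark-defaults.conf is rendered from the 'spark-defaults' configuration and,
  # after this commit, is explicitly owned by the Spark service user and group
  # so that a non-root agent can create and manage it. Note that resources like
  # PropertiesFile must be instantiated inside a resource_management Environment,
  # which Ambari's script framework provides when the stack script runs.
  PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
    properties = params.config['configurations']['spark-defaults'],
    key_value_delimiter = " ",
    owner = params.spark_user,
    group = params.spark_group,
  )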

http://git-wip-us.apache.org/repos/asf/ambari/blob/62b7fe87/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 4b87531..369fdf9 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -195,7 +195,9 @@ class TestJobHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
@@ -260,7 +262,9 @@ class TestJobHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/62b7fe87/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index 081db57..a414dda 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -63,7 +63,9 @@ class TestSparkClient(RMFTestCase):
         recursive = True,
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
@@ -99,7 +101,9 @@ class TestSparkClient(RMFTestCase):
         recursive = True,
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/62b7fe87/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index 031e0ac..9e41e11 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -118,7 +118,9 @@ class TestSparkThriftServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
+        owner = 'spark',
         key_value_delimiter = ' ',
+        group = 'spark',
         properties = self.getConfig()['configurations']['spark-defaults'],
     )
     self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh',
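
As a quick manual spot-check on an agent host, the resulting ownership of spark-defaults.conf can be confirmed with a few lines of standard-library Python. This snippet is illustrative only and not part of the commit; the file path and the expected spark/spark owner and group are taken from the test expectations above.

#!/usr/bin/env python
# Hypothetical verification snippet, not part of this commit.
import grp
import os
import pwd

CONF = '/usr/hdp/current/spark-client/conf/spark-defaults.conf'

st = os.stat(CONF)
owner = pwd.getpwuid(st.st_uid).pw_name
group = grp.getgrgid(st.st_gid).gr_name

# After this change the file should belong to the spark user and group.
assert (owner, group) == ('spark', 'spark'), (owner, group)
print('%s is owned by %s:%s' % (CONF, owner, group))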