Posted to dev@ambari.apache.org by "JiaLiangC (via GitHub)" <gi...@apache.org> on 2023/05/25 02:48:10 UTC

[GitHub] [ambari] JiaLiangC commented on a diff in pull request #3695: AMBARI-25932: fix wrong config file name in spark service advisor

JiaLiangC commented on code in PR #3695:
URL: https://github.com/apache/ambari/pull/3695#discussion_r1204937179


##########
ambari-server/src/main/resources/stacks/BIGTOP/3.2.0/services/SPARK/service_advisor.py:
##########
@@ -163,115 +163,100 @@ def isKerberosEnabled(services, configurations):
     :rtype: bool
     :return: True or False
     """
-    if configurations and "spark2-defaults" in configurations and \
-            "spark.history.kerberos.enabled" in configurations["spark2-defaults"]["properties"]:
-      return configurations["spark2-defaults"]["properties"]["spark.history.kerberos.enabled"].lower() == "true"
-    elif services and "spark2-defaults" in services["configurations"] and \
-            "spark.history.kerberos.enabled" in services["configurations"]["spark2-defaults"]["properties"]:
-      return services["configurations"]["spark2-defaults"]["properties"]["spark.history.kerberos.enabled"].lower() == "true"
+    if configurations and "spark-defaults" in configurations and \
+            "spark.history.kerberos.enabled" in configurations["spark-defaults"]["properties"]:
+      return configurations["spark-defaults"]["properties"]["spark.history.kerberos.enabled"].lower() == "true"
+    elif services and "spark-defaults" in services["configurations"] and \
+            "spark.history.kerberos.enabled" in services["configurations"]["spark-defaults"]["properties"]:
+      return services["configurations"]["spark-defaults"]["properties"]["spark.history.kerberos.enabled"].lower() == "true"
     else:
       return False
 
 
-class Spark2Recommender(service_advisor.ServiceAdvisor):
+class SparkRecommender(service_advisor.ServiceAdvisor):
   """
-  Spark2 Recommender suggests properties when adding the service for the first time or modifying configs via the UI.
+  Spark Recommender suggests properties when adding the service for the first time or modifying configs via the UI.
   """
 
   def __init__(self, *args, **kwargs):
-    self.as_super = super(Spark2Recommender, self)
+    self.as_super = super(SparkRecommender, self)
     self.as_super.__init__(*args, **kwargs)
 
-  def recommendSpark2ConfigurationsFromHDP25(self, configurations, clusterData, services, hosts):
+  def recommendSparkConfigurationsFromHDP25(self, configurations, clusterData, services, hosts):
     """
     :type configurations dict
     :type clusterData dict
     :type services dict
     :type hosts dict
     """
-    putSparkProperty = self.putProperty(configurations, "spark2-defaults", services)
-    putSparkThriftSparkConf = self.putProperty(configurations, "spark2-thrift-sparkconf", services)
-
-    spark_queue = self.recommendYarnQueue(services, "spark2-defaults", "spark.yarn.queue")
+    putSparkProperty = self.putProperty(configurations, "spark-defaults", services)
+    spark_queue = self.recommendYarnQueue(services, "spark-defaults", "spark.yarn.queue")
     if spark_queue is not None:
       putSparkProperty("spark.yarn.queue", spark_queue)
 
-    spark_thrift_queue = self.recommendYarnQueue(services, "spark2-thrift-sparkconf", "spark.yarn.queue")
-    if spark_thrift_queue is not None:
-      putSparkThriftSparkConf("spark.yarn.queue", spark_thrift_queue)
-
-
-  def recommendSPARK2ConfigurationsFromHDP26(self, configurations, clusterData, services, hosts):
+  def recommendSPARKConfigurationsFromHDP26(self, configurations, clusterData, services, hosts):
     """
     :type configurations dict
     :type clusterData dict
     :type services dict
     :type hosts dict
     """
 
-    if Spark2ServiceAdvisor.isKerberosEnabled(services, configurations):
+    if SparkServiceAdvisor.isKerberosEnabled(services, configurations):
 
-      spark2_defaults = self.getServicesSiteProperties(services, "spark2-defaults")
+      spark_defaults = self.getServicesSiteProperties(services, "spark-defaults")
 
-      if spark2_defaults:
-        putSpark2DafaultsProperty = self.putProperty(configurations, "spark2-defaults", services)
-        putSpark2DafaultsProperty('spark.acls.enable', 'true')
-        putSpark2DafaultsProperty('spark.admin.acls', '')
-        putSpark2DafaultsProperty('spark.history.ui.acls.enable', 'true')
-        putSpark2DafaultsProperty('spark.history.ui.admin.acls', '')
+      if spark_defaults:
+        putSparkDefaultsProperty = self.putProperty(configurations, "spark-defaults", services)
+        putSparkDefaultsProperty('spark.acls.enable', 'true')
+        putSparkDefaultsProperty('spark.admin.acls', '')
+        putSparkDefaultsProperty('spark.history.ui.acls.enable', 'true')
+        putSparkDefaultsProperty('spark.history.ui.admin.acls', '')
 
 
-    self.__addZeppelinToLivy2SuperUsers(configurations, services)
 
-
-  def recommendSPARK2ConfigurationsFromHDP30(self, configurations, clusterData, services, hosts):
+  def recommendSPARKConfigurationsFromHDP30(self, configurations, clusterData, services, hosts):
 
     # SAC
-    if "spark2-atlas-application-properties-override" in services["configurations"]:
-      spark2_atlas_application_properties_override = self.getServicesSiteProperties(services, "spark2-atlas-application-properties-override")
-      spark2_defaults_properties = self.getServicesSiteProperties(services, "spark2-defaults")
-      spark2_thriftspark_conf_properties = self.getServicesSiteProperties(services, "spark2-thrift-sparkconf")
-      putSpark2DefautlsProperty = self.putProperty(configurations, "spark2-defaults", services)
-      putSpark2DefaultsPropertyAttribute = self.putPropertyAttribute(configurations,"spark2-defaults")
-      putSpark2ThriftSparkConfProperty = self.putProperty(configurations, "spark2-thrift-sparkconf", services)
-      putSpark2AtlasHookProperty = self.putProperty(configurations, "spark2-atlas-application-properties-override", services)
-      putSpark2AtlasHookPropertyAttribute = self.putPropertyAttribute(configurations,"spark2-atlas-application-properties-override")
-      spark2_sac_enabled = None
-      if self.checkSiteProperties(spark2_atlas_application_properties_override, "atlas.spark.enabled"):
-        spark2_sac_enabled = spark2_atlas_application_properties_override["atlas.spark.enabled"]
-        spark2_sac_enabled = str(spark2_sac_enabled).upper() == 'TRUE'
-
-      if spark2_sac_enabled:
-
-        self.setOrAddValueToProperty(putSpark2DefautlsProperty, spark2_defaults_properties, "spark.driver.extraClassPath", "/usr/hdp/current/spark-atlas-connector/*", ":")
-        self.setOrAddValueToProperty(putSpark2DefautlsProperty, spark2_defaults_properties, "spark.yarn.dist.files", "/etc/spark2/conf/atlas-application.properties.yarn#atlas-application.properties", ",")
-        self.setOrAddValueToProperty(putSpark2ThriftSparkConfProperty, spark2_thriftspark_conf_properties, "spark.driver.extraClassPath", "/usr/hdp/current/spark-atlas-connector/*", ":")
-
-        self.setOrAddValueToProperty(putSpark2DefautlsProperty, spark2_defaults_properties, "spark.extraListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
-        self.setOrAddValueToProperty(putSpark2DefautlsProperty, spark2_defaults_properties, "spark.sql.queryExecutionListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
-        self.setOrAddValueToProperty(putSpark2ThriftSparkConfProperty, spark2_thriftspark_conf_properties, "spark.extraListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
-        self.setOrAddValueToProperty(putSpark2ThriftSparkConfProperty, spark2_thriftspark_conf_properties, "spark.sql.queryExecutionListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
-
-        self.setOrAddValueToProperty(putSpark2DefautlsProperty, spark2_defaults_properties, "spark.sql.streaming.streamingQueryListeners", "com.hortonworks.spark.atlas.SparkAtlasStreamingQueryEventTracker", ",")
-        self.setOrAddValueToProperty(putSpark2ThriftSparkConfProperty, spark2_thriftspark_conf_properties, "spark.sql.streaming.streamingQueryListeners", "com.hortonworks.spark.atlas.SparkAtlasStreamingQueryEventTracker", ",")
-
-        putSpark2AtlasHookProperty("atlas.client.checkModelInStart", "false")
+    if "spark-atlas-application-properties-override" in services["configurations"]:
+      spark_atlas_application_properties_override = self.getServicesSiteProperties(services, "spark-atlas-application-properties-override")
+      spark_defaults_properties = self.getServicesSiteProperties(services, "spark-defaults")
+      putSparkDefaultsProperty = self.putProperty(configurations, "spark-defaults", services)
+      putSparkDefaultsPropertyAttribute = self.putPropertyAttribute(configurations,"spark-defaults")
+      putSparkAtlasHookProperty = self.putProperty(configurations, "spark-atlas-application-properties-override", services)
+      putSparkAtlasHookPropertyAttribute = self.putPropertyAttribute(configurations,"spark-atlas-application-properties-override")
+      spark_sac_enabled = None
+      if self.checkSiteProperties(spark_atlas_application_properties_override, "atlas.spark.enabled"):
+        spark_sac_enabled = spark_atlas_application_properties_override["atlas.spark.enabled"]
+        spark_sac_enabled = str(spark_sac_enabled).upper() == 'TRUE'
+
+      if spark_sac_enabled:
+        self.setOrAddValueToProperty(putSparkDefaultsProperty, spark_defaults_properties, "spark.driver.extraClassPath", "/usr/bigtop/current/spark-atlas-connector/*", ":")
+        self.setOrAddValueToProperty(putSparkDefaultsProperty, spark_defaults_properties, "spark.yarn.dist.files", "/etc/spark/conf/atlas-application.properties.yarn#atlas-application.properties", ",")
+        self.setOrAddValueToProperty(putSparkDefaultsProperty, spark_defaults_properties, "spark.extraListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
+        self.setOrAddValueToProperty(putSparkDefaultsProperty, spark_defaults_properties, "spark.sql.queryExecutionListeners", "com.hortonworks.spark.atlas.SparkAtlasEventTracker", ",")
+
+        self.setOrAddValueToProperty(putSparkDefaultsProperty, spark_defaults_properties, "spark.sql.streaming.streamingQueryListeners", "com.hortonworks.spark.atlas.SparkAtlasStreamingQueryEventTracker", ",")
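
For context, here is a minimal sketch (with hypothetical values) of the nested
dict shape that isKerberosEnabled and the putProperty helpers above traverse;
the config type and property names are the ones used in this diff:

    configurations = {
        "spark-defaults": {
            "properties": {
                "spark.history.kerberos.enabled": "true",
                "spark.yarn.queue": "default",
            }
        }
    }

    # The services argument wraps the same layout one level deeper:
    services = {"configurations": configurations}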

Review Comment:
   We can remove it for now, since Atlas has not been integrated yet. The function "recommendSPARKConfigurationsFromHDP30" mainly sets properties for the Spark Atlas Connector (SAC) and Atlas. Therefore, we can remove this function first and re-add it, together with its service advisor tests, once the relevant services have been integrated.
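
   For illustration, here is a rough sketch (not a final patch) of what the trimmed recommender could look like once that hook is dropped; the remaining method bodies are taken from the diff above, with the helper name spelled putSparkDefaultsProperty:

       # assumes the service_advisor import already present at the top of the file
       class SparkRecommender(service_advisor.ServiceAdvisor):
         """
         Spark Recommender with the SAC/Atlas (HDP30) hook removed.
         """

         def __init__(self, *args, **kwargs):
           self.as_super = super(SparkRecommender, self)
           self.as_super.__init__(*args, **kwargs)

         def recommendSparkConfigurationsFromHDP25(self, configurations, clusterData, services, hosts):
           # Recommend a YARN queue for Spark jobs, as in the diff above.
           putSparkProperty = self.putProperty(configurations, "spark-defaults", services)
           spark_queue = self.recommendYarnQueue(services, "spark-defaults", "spark.yarn.queue")
           if spark_queue is not None:
             putSparkProperty("spark.yarn.queue", spark_queue)

         def recommendSPARKConfigurationsFromHDP26(self, configurations, clusterData, services, hosts):
           # Enable Spark ACLs when the cluster is kerberized, as in the diff above.
           if SparkServiceAdvisor.isKerberosEnabled(services, configurations):
             if self.getServicesSiteProperties(services, "spark-defaults"):
               putSparkDefaultsProperty = self.putProperty(configurations, "spark-defaults", services)
               putSparkDefaultsProperty("spark.acls.enable", "true")
               putSparkDefaultsProperty("spark.admin.acls", "")
               putSparkDefaultsProperty("spark.history.ui.acls.enable", "true")
               putSparkDefaultsProperty("spark.history.ui.admin.acls", "")

         # recommendSPARKConfigurationsFromHDP30 (Spark Atlas Connector wiring)
         # is removed until Atlas is integrated into the BIGTOP stack.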



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@ambari.apache.org
For additional commands, e-mail: dev-help@ambari.apache.org