You are viewing a plain text version of this content; the canonical (HTML) version is available in the mailing list archive.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/02 13:40:51 UTC
[06/11] ambari git commit: AMBARI-14850: Removed unused functions
from params.py in HAWQ (bhuvnesh2703 via jaoki)
AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6ce15655
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6ce15655
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6ce15655
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6ce156554600214339c7e8c35219abaf570346e9
Parents: 9a03489
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Feb 1 17:19:59 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Feb 1 17:19:59 2016 -0800
----------------------------------------------------------------------
.../HAWQ/2.0.0/package/scripts/params.py | 21 --------------------
1 file changed, 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6ce15655/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 604ddc0..add3c63 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -22,14 +22,7 @@ from hawq_constants import PXF_PORT, pxf_hdfs_test_dir
from resource_management import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import conf_select
-try:
- from resource_management.libraries.functions import hdp_select as hadoop_select
-except ImportError:
- from resource_management.libraries.functions import phd_select as hadoop_select
-
config = Script.get_config()
@@ -70,9 +63,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hadoop_select.get_hadoop_dir("bin")
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
# HDFSResource partial function
@@ -85,17 +75,6 @@ HdfsResource = functools.partial(HdfsResource,
hdfs_site=hdfs_site,
default_fs=default_fs)
-# ExecuteHadoop partial function
-ExecuteHadoop = functools.partial(ExecuteHadoop,
- user=hdfs_superuser,
- logoutput=True,
- conf_dir=hadoop_conf_dir,
- security_enabled=security_enabled,
- kinit_path_local=kinit_path_local,
- keytab=hdfs_user_keytab,
- principal=hdfs_principal_name,
- bin_dir=execute_path)
-
# For service Check
is_pxf_installed = __get_component_host("pxf_hosts") is not None