Posted to commits@hawq.apache.org by wl...@apache.org on 2016/09/01 01:37:29 UTC

incubator-hawq git commit: HAWQ-845. Parameterize kerberos principal name for HAWQ

Repository: incubator-hawq
Updated Branches:
  refs/heads/master a16030888 -> 030491b89


HAWQ-845. Parameterize kerberos principal name for HAWQ


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/030491b8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/030491b8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/030491b8

Branch: refs/heads/master
Commit: 030491b896875e574a2563d77c92fbfd1503d5bf
Parents: a160308
Author: Wen Lin <wl...@pivotal.io>
Authored: Wed Aug 31 14:10:51 2016 +0800
Committer: Wen Lin <wl...@pivotal.io>
Committed: Wed Aug 31 14:10:51 2016 +0800

----------------------------------------------------------------------
 .../resourcebroker_LIBYARN_proc.c               | 18 +++++++-----
 src/bin/gpcheckhdfs/gpcheckhdfs.c               | 30 +++++++++++++-------
 tools/bin/gpcheck                               |  3 +-
 tools/bin/hawq_ctl                              |  9 ++++--
 tools/bin/hawqfilespace                         |  7 ++++-
 5 files changed, 45 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/030491b8/src/backend/resourcemanager/resourcebroker/resourcebroker_LIBYARN_proc.c
----------------------------------------------------------------------
diff --git a/src/backend/resourcemanager/resourcebroker/resourcebroker_LIBYARN_proc.c b/src/backend/resourcemanager/resourcebroker/resourcebroker_LIBYARN_proc.c
index ee85d2c..9ea8510 100644
--- a/src/backend/resourcemanager/resourcebroker/resourcebroker_LIBYARN_proc.c
+++ b/src/backend/resourcemanager/resourcebroker/resourcebroker_LIBYARN_proc.c
@@ -488,6 +488,10 @@ int  loadParameters(void)
 	/* If kerberos is enable, fetch the principal from ticket cache file. */
 	if (enable_secure_filesystem)
 	{
+		if (!login())
+		{
+			elog(WARNING, "Resource broker failed to refresh kerberos ticket.");
+		}
 		YARNUser = ExtractPrincipalFromTicketCache(krb5_ccname);
 		YARNUserShouldFree = true;
 	}
@@ -503,13 +507,13 @@ int  loadParameters(void)
 			  "Scheduler server %s:%s "
 			  "Queue %s Application name %s, "
 			  "by user:%s",
-		      YARNServer.Str,
-		      YARNPort.Str,
-		      YARNSchedulerServer.Str,
-		      YARNSchedulerPort.Str,
-		      YARNQueueName.Str,
-		      YARNAppName.Str,
-		      YARNUser);
+			  YARNServer.Str,
+			  YARNPort.Str,
+			  YARNSchedulerServer.Str,
+			  YARNSchedulerPort.Str,
+			  YARNQueueName.Str,
+			  YARNAppName.Str,
+			  YARNUser);
 exit:
 	if ( res != FUNC_RETURN_OK ) {
 		elog(WARNING, "YARN mode resource broker failed to load YARN connection arguments.");

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/030491b8/src/bin/gpcheckhdfs/gpcheckhdfs.c
----------------------------------------------------------------------
diff --git a/src/bin/gpcheckhdfs/gpcheckhdfs.c b/src/bin/gpcheckhdfs/gpcheckhdfs.c
index 935b076..685e79c 100644
--- a/src/bin/gpcheckhdfs/gpcheckhdfs.c
+++ b/src/bin/gpcheckhdfs/gpcheckhdfs.c
@@ -63,7 +63,9 @@ void getHostAndPort(const char * dfs_url, char * host, char * port);
 /*
  * test whether hdfs can be connected while kerberos is on or not
  */
-int testHdfsConnect(hdfsFS * fs, const char * host, int port, const char * krbstatus, const char * krb_keytabfile);
+int testHdfsConnect(hdfsFS * fs, const char * host, int port,
+        const char * krbstatus, const char * krb_srvname,
+        const char * krb_keytabfile);
 
 /*
  * test whether the filepath which dfs_url defined in hdfs is existed or not.
@@ -81,25 +83,31 @@ int main(int argc, char * argv[]) {
     *  argv[1]:dfs_name
     *  argv[2]:dfs_url
     *  argv[3]:krb status
-    *  argv[4]:krb keytab file
+    *  argv[4]:krb service name
+    *  argv[5]:krb keytab file
     */
-    if (argc < 3 || argc > 5 || (argc == 4 && 0 != strcasecmp(argv[3],"off") && 0 != strcasecmp(argv[3],"false"))) {
-        fprintf(stderr, "ERROR: gpcheckhdfs parameter error, Please check your config file\n"
-            "\tDFS_NAME and DFS_URL are required, KERBEROS_KEYFILE and ENABLE_SECURE_FILESYSTEM are optional\n");
+    if (argc < 3 || argc > 6
+            || ((argc == 4 || argc == 5) && 0 != strcasecmp(argv[3], "off") && 0 != strcasecmp(argv[3], "false"))) {
+        fprintf(stderr,
+                "ERROR: gpcheckhdfs parameter error, Please check your config file\n"
+                        "\tDFS_NAME and DFS_URL are required, KERBEROS_SERVICENAME, KERBEROS_KEYFILE and "
+                        "ENABLE_SECURE_FILESYSTEM are optional\n");
         return GPCHKHDFS_ERR;
     } 
 
     char * dfs_name = argv[1];
     char * dfs_url = argv[2];
     char * krbstatus = NULL;
+    char * krb_srvname = NULL;
     char * krb_keytabfile = NULL;
 
     if (argc >= 4) {
         krbstatus = argv[3];
     }
 
-    if (argc >= 5) {
-        krb_keytabfile = argv[4];
+    if (argc >= 6) {
+        krb_srvname = argv[4];
+        krb_keytabfile = argv[5];
     }
 
     char * host = (char *)malloc(255 * sizeof(char));
@@ -113,7 +121,8 @@ int main(int argc, char * argv[]) {
     }
 
     hdfsFS fs;
-    int connErrCode = testHdfsConnect(&fs, host, iPort, krbstatus, krb_keytabfile);
+    int connErrCode = testHdfsConnect(&fs, host, iPort, krbstatus, krb_srvname,
+            krb_keytabfile);
 
     if (connErrCode) {
         return connErrCode;
@@ -189,7 +198,9 @@ int testHdfsOperateFile(hdfsFS fs, const char * filepath, const char * dfscomple
     return 0;
 }
 
-int testHdfsConnect(hdfsFS * fsptr, const char * host, int iPort, const char * krbstatus, const char * krb_keytabfile) {
+int testHdfsConnect(hdfsFS * fsptr, const char * host, int iPort,
+        const char * krbstatus, const char * krb_srvname,
+        const char * krb_keytabfile) {
     struct hdfsBuilder * builder = hdfsNewBuilder();
     hdfsBuilderSetNameNode(builder, host);
 
@@ -198,7 +209,6 @@ int testHdfsConnect(hdfsFS * fsptr, const char * host, int iPort, const char * k
 
     if (NULL != krbstatus && NULL != krb_keytabfile &&
             (!strcasecmp(krbstatus, "on") || !strcasecmp(krbstatus, "true"))) {   //Kerberos if On
-        char * krb_srvname = "postgres";
         char * krb5_ccname = "/tmp/postgres.ccname";
         char cmd[1024];
         snprintf(cmd, sizeof(cmd), "kinit -k -t %s -c %s %s",

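For reference, the gpcheckhdfs argument order after this patch is argv[1]=dfs_name, argv[2]=dfs_url, argv[3]=krb status, argv[4]=krb service name, argv[5]=krb keytab file; the service name is no longer hardcoded to "postgres" inside testHdfsConnect. A minimal sketch of driving the tool with the new argument list, where the namenode URL, service name and keytab path are placeholders rather than values taken from the patch:

    import subprocess

    # Hypothetical values; the Kerberos service name now arrives as argv[4]
    # and the keytab file moves to argv[5].
    cmd = ["gpcheckhdfs", "hdfs", "namenode.example.com:8020/hawq_default",
           "on", "postgres", "/etc/security/keytabs/hawq.keytab"]
    ret = subprocess.call(cmd)   # non-zero return means the HDFS check failed
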
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/030491b8/tools/bin/gpcheck
----------------------------------------------------------------------
diff --git a/tools/bin/gpcheck b/tools/bin/gpcheck
index e9da85a..3d45f18 100755
--- a/tools/bin/gpcheck
+++ b/tools/bin/gpcheck
@@ -976,9 +976,8 @@ def testHDFSConfig(host):
                 checkFailed(host.hostname, "YARN configuration: %s not defined" % item)
 
     # Check yarn kerberos properties
-    #yarn_kerberos_check_list = ['hadoop.proxyuser.yarn.groups', 'hadoop.proxyuser.yarn.hosts', 'hadoop.proxyuser.postgres.hosts', 'hadoop.proxyuser.postgres.groups']
     if yarn_enabled and options.kerberos:
-        yarn_kerberos_check_list = ['yarn.nodemanager.keytab', 'yarn.nodemanager.principal','hadoop.proxyuser.postgres.groups', \
+        yarn_kerberos_check_list = ['yarn.nodemanager.keytab', 'yarn.nodemanager.principal', \
                                     'yarn.resourcemanager.keytab', 'yarn.resourcemanager.principal']
         for item in yarn_kerberos_check_list:
             if item in actual_config:

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/030491b8/tools/bin/hawq_ctl
----------------------------------------------------------------------
diff --git a/tools/bin/hawq_ctl b/tools/bin/hawq_ctl
index bcfadc7..81296d7 100755
--- a/tools/bin/hawq_ctl
+++ b/tools/bin/hawq_ctl
@@ -128,6 +128,11 @@ class HawqInit:
         else:
             self.krb_server_keyfile = ''
 
+        if 'krb_srvname' in self.hawq_dict:
+            self.krb_srvname = self.hawq_dict['krb_srvname']
+        else:
+            self.krb_srvname = 'postgres'
+    
     def _write_config(self):
         configFile = "%s/etc/_mgmt_config" % self.GPHOME
         # Clean configFile while the first write in.
@@ -165,8 +170,8 @@ class HawqInit:
             local_ssh(cmd, logger)
 
     def check_hdfs_path(self):
-        cmd = "%s/bin/gpcheckhdfs hdfs %s %s %s" % \
-              (self.GPHOME, self.dfs_url, self.enable_secure_filesystem, self.krb_server_keyfile)
+        cmd = "%s/bin/gpcheckhdfs hdfs %s %s %s %s" % \
+              (self.GPHOME, self.dfs_url, self.enable_secure_filesystem, self.krb_srvname, self.krb_server_keyfile)
         logger.info("Check if hdfs path is available")
         logger.debug("Check hdfs: %s" % cmd)
         check_return_code(local_ssh(cmd, logger, warning = True), logger, "Check hdfs failed, please verify your hdfs settings")

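The net effect in hawq_ctl is that the Kerberos service name now flows from the cluster configuration into the gpcheckhdfs call instead of being assumed to be "postgres". A minimal sketch of the fallback behaviour, assuming hawq_dict is the parsed cluster configuration and using hypothetical paths:

    # Same effect as the if/else added above: use the configured value,
    # otherwise fall back to the historical default "postgres".
    hawq_dict = {'enable_secure_filesystem': 'on',
                 'krb_server_keyfile': '/etc/security/keytabs/hawq.keytab'}
    krb_srvname = hawq_dict.get('krb_srvname', 'postgres')
    cmd = "%s/bin/gpcheckhdfs hdfs %s %s %s %s" % \
          ('/usr/local/hawq', 'namenode.example.com:8020/hawq_default',
           hawq_dict['enable_secure_filesystem'], krb_srvname,
           hawq_dict['krb_server_keyfile'])
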
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/030491b8/tools/bin/hawqfilespace
----------------------------------------------------------------------
diff --git a/tools/bin/hawqfilespace b/tools/bin/hawqfilespace
index dd85aff..61dd423 100755
--- a/tools/bin/hawqfilespace
+++ b/tools/bin/hawqfilespace
@@ -261,6 +261,11 @@ def getdir(prompt, hosts=[], primary=None, shared=False, fsysn=None, db=None):
                 rows = cursor.fetchall()
                 krbstatus = rows[0][0]
 
+                cursor = dbconn.execSQL(db, "show krb_srvname")
+                db.commit()
+                rows = cursor.fetchall();
+                krb_srvname = rows[0][0]
+
                 cursor = dbconn.execSQL(db, "show krb_server_keyfile")
                 db.commit()
                 rows = cursor.fetchall();
@@ -272,7 +277,7 @@ def getdir(prompt, hosts=[], primary=None, shared=False, fsysn=None, db=None):
             dfs_name = fsysn
             dfs_url = value
 
-            command = "gpcheckhdfs %s %s %s %s" % (dfs_name, dfs_url, krbstatus, krb_keyfile)
+            command = "gpcheckhdfs %s %s %s %s %s" % (dfs_name, dfs_url, krbstatus, krb_srvname, krb_keyfile)
             (status,returnCode) = commands.getstatusoutput(command)
             if status != 0:
                 print command