Posted to commits@bigtop.apache.org by rv...@apache.org on 2013/03/28 22:14:41 UTC

[4/5] git commit: BIGTOP-893. migrate puppet code to use init-hdfs.sh

BIGTOP-893. migrate puppet code to use init-hdfs.sh


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/5b640023
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/5b640023
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/5b640023

Branch: refs/heads/master
Commit: 5b64002342a1df10927d4a6fdeb42f9c956e68ad
Parents: 4d3290d
Author: Roman Shaposhnik <rv...@cloudera.com>
Authored: Thu Mar 28 12:49:35 2013 -0700
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Thu Mar 28 12:49:35 2013 -0700

----------------------------------------------------------------------
 bigtop-deploy/puppet/manifests/cluster.pp      |   49 +++++--------------
 bigtop-packages/src/common/hadoop/init-hdfs.sh |    3 +
 2 files changed, 16 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/5b640023/bigtop-deploy/puppet/manifests/cluster.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/manifests/cluster.pp b/bigtop-deploy/puppet/manifests/cluster.pp
index 041a37a..de1b8bf 100644
--- a/bigtop-deploy/puppet/manifests/cluster.pp
+++ b/bigtop-deploy/puppet/manifests/cluster.pp
@@ -232,48 +232,25 @@ class hadoop_head_node inherits hadoop_cluster_node {
         kerberos_realm => $kerberos_realm, 
   }
 
-  hadoop::create_hdfs_dirs { [ "/tmp", "/var", "/var/log", "/hbase", "/benchmarks", "/user", "/user/history", "/user/jenkins", "/user/hive", "/user/root", "/user/hue", "/user/oozie" ]:
-    auth           => $hadoop_security_authentication,
-    hdfs_dirs_meta => { "/tmp"          => { perm =>"1777", user => "hdfs"   },
-                        "/var"          => { perm => "755", user => "hdfs"   },
-                        "/var/log"      => { perm =>"1775", user => "yarn:mapred" },
-                        "/hbase"        => { perm => "755", user => "hbase"  },
-                        "/benchmarks"   => { perm => "777", user => "hdfs"   },
-                        "/user"         => { perm => "755", user => "hdfs"   },
-                        "/user/history" => { perm => "775", user => "mapred" },
-                        "/user/jenkins" => { perm => "777", user => "jenkins"},
-                        "/user/hive"    => { perm => "777", user => "hive"   },
-                        "/user/root"    => { perm => "777", user => "root"   },
-                        "/user/hue"     => { perm => "777", user => "hue"    },
-                        "/user/oozie"   => { perm => "777", user => "oozie"  },
-                      },
-  }
-  Hadoop::Create_hdfs_dirs<||> -> Hadoop-hbase::Master<||>
-  Hadoop::Create_hdfs_dirs<||> -> Hadoop::Resourcemanager<||>
-  Hadoop::Create_hdfs_dirs<||> -> Hadoop::Historyserver<||>
-  Hadoop::Create_hdfs_dirs<||> -> Hadoop::Httpfs<||>
-  Hadoop::Create_hdfs_dirs<||> -> Hadoop::Rsync_hdfs<||>
-
-  hadoop::rsync_hdfs { [ "/user/oozie/share/lib/hive",
-                         "/user/oozie/share/lib/mapreduce-streaming",
-                         "/user/oozie/share/lib/distcp",
-                         "/user/oozie/share/lib/pig",
-                         "/user/oozie/share/lib/sqoop" ]:
-    auth           => $hadoop_security_authentication,
-    files          => {  "/user/oozie/share/lib/hive"                 => "/usr/lib/hive/lib/*.jar",
-                         "/user/oozie/share/lib/mapreduce-streaming"  => "/usr/lib/hadoop-mapreduce/hadoop-streaming*.jar",
-                         "/user/oozie/share/lib/distcp"               => "/usr/lib/hadoop-mapreduce/hadoop-distcp*.jar",
-                         "/user/oozie/share/lib/pig"                  => "/usr/lib/pig/{lib/,}*.jar",
-                         "/user/oozie/share/lib/sqoop"                => "/usr/lib/sqoop/{lib/,}*.jar" },
-  }
-  Hadoop::Rsync_hdfs<||> -> Hadoop-oozie::Server<||>
-
   solr::server { "solrcloud server":
        collections => $solrcloud_collections,
        port        => $solrcloud_port,
        port_admin  => $solrcloud_port_admin,
        zk          => $solrcloud_zk,
   }
+
+  exec { "init hdfs":
+        path    => ['/bin','/sbin','/usr/bin','/usr/sbin'],
+        command => 'bash -x /usr/lib/hadoop/libexec/init-hdfs.sh',
+        require => Package['hadoop-hdfs']
+  }
+
+  Exec<| title == "init hdfs" |> -> Hadoop-hbase::Master<||>
+  Exec<| title == "init hdfs" |> -> Hadoop::Resourcemanager<||>
+  Exec<| title == "init hdfs" |> -> Hadoop::Historyserver<||>
+  Exec<| title == "init hdfs" |> -> Hadoop::Httpfs<||>
+  Exec<| title == "init hdfs" |> -> Hadoop::Rsync_hdfs<||>
+  Exec<| title == "init hdfs" |> -> Hadoop-oozie::Server<||>
 }
 
 class standby_head_node inherits hadoop_cluster_node {

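The hunk above collapses the per-directory hadoop::create_hdfs_dirs and hadoop::rsync_hdfs resources into a single exec that runs the packaged init-hdfs.sh, and the Exec<| title == "init hdfs" |> collectors keep the same ordering guarantees for the HBase master, resourcemanager, historyserver, HttpFS, rsync_hdfs and the Oozie server. As a minimal sketch of what that exec amounts to on the head node, assuming the stock Bigtop install paths and a NameNode that is already up (the follow-up listing is only an illustrative sanity check, not part of the commit):

  # What Puppet now runs once the hadoop-hdfs package is installed
  # (same command as the exec resource above):
  bash -x /usr/lib/hadoop/libexec/init-hdfs.sh

  # Illustrative check: the directories formerly declared in
  # hadoop::create_hdfs_dirs should now exist with their expected
  # owners and modes.
  su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -ls / /user /var/log'
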
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5b640023/bigtop-packages/src/common/hadoop/init-hdfs.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hadoop/init-hdfs.sh b/bigtop-packages/src/common/hadoop/init-hdfs.sh
index de43564..80cb212 100755
--- a/bigtop-packages/src/common/hadoop/init-hdfs.sh
+++ b/bigtop-packages/src/common/hadoop/init-hdfs.sh
@@ -26,6 +26,9 @@ su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /var'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /var/log'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 1775 /var/log'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown yarn:mapred /var/log'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /var/log/hadoop-yarn/apps'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 1777 /var/log/hadoop-yarn/apps'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown yarn:mapred /var/log/hadoop-yarn/apps'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /hbase'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hbase /hbase'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /benchmarks'
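The three new lines give /var/log/hadoop-yarn/apps the same treatment as /var/log above: created with -p, sticky-bit world-writable (1777) and owned by yarn:mapred, which matches the remote application log directory YARN log aggregation typically writes to in Bigtop deployments. A hedged way to confirm the result once the script has run against a live HDFS (illustrative only, not part of the script):

  # List the parent so the new apps directory shows up with its
  # owner, group and mode (expected: yarn mapred, drwxrwxrwt).
  su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -ls /var/log/hadoop-yarn'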