You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by rv...@apache.org on 2011/11/01 22:53:09 UTC
svn commit: r1196308 [2/6] - in /incubator/bigtop/branches/hadoop-0.23: ./
bigtop-deploy/ bigtop-deploy/puppet/ bigtop-deploy/puppet/manifests/
bigtop-deploy/puppet/modules/ bigtop-deploy/puppet/modules/hadoop-flume/
bigtop-deploy/puppet/modules/hadoop...
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp Tue Nov 1 21:53:04 2011
@@ -0,0 +1,242 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Puppet module entry point: one define per Hadoop cluster role (MR1-era
+# daemons: namenode, datanode, jobtracker, tasktracker, secondarynamenode)
+# plus a shared `common` class that renders the client configuration.
+class hadoop {
+
+ /**
+ * Common definitions for hadoop nodes.
+ * They all need these files so we can access hdfs/jobs from any node
+ */
+ class common {
+ # Config files are rendered from ERB templates; the template variables
+ # (hadoop_namenode_host, hadoop_security_authentication, ...) are set by
+ # the role defines below and resolved via dynamic scoping — TODO confirm
+ # this still works on the targeted Puppet version.
+ file {
+ "/etc/hadoop/conf/core-site.xml":
+ content => template('hadoop/core-site.xml'),
+ }
+
+ file {
+ "/etc/hadoop/conf/mapred-site.xml":
+ content => template('hadoop/mapred-site.xml'),
+ }
+
+ file {
+ "/etc/hadoop/conf/hdfs-site.xml":
+ content => template('hadoop/hdfs-site.xml'),
+ }
+
+ file {
+ "/etc/hadoop/conf/hadoop-env.sh":
+ content => template('hadoop/hadoop-env.sh'),
+ }
+
+ file {
+ "/etc/default/hadoop":
+ content => template('hadoop/hadoop'),
+ }
+
+ package { "hadoop":
+ ensure => latest,
+ }
+
+ # hadoop-native additionally requires the Bigtop yum repo to be declared
+ # elsewhere in the catalog (Yumrepo["Bigtop"]).
+ package { "hadoop-native":
+ ensure => latest,
+ require => [Package["hadoop"], Yumrepo["Bigtop"]],
+ }
+ }
+
+
+ # Datanode role: installs and runs hadoop-datanode, pointing it at the
+ # given namenode. $dirs lists the local HDFS data directories.
+ define datanode ($namenode_host, $namenode_port, $port = "50075", $auth = "simple", $dirs = ["/tmp/data"]) {
+
+ # Exported into template scope for the config files rendered by `common`.
+ $hadoop_namenode_host = $namenode_host
+ $hadoop_namenode_port = $namenode_port
+ $hadoop_datanode_port = $port
+ $hadoop_security_authentication = $auth
+
+ include common
+
+ package { "hadoop-datanode":
+ ensure => latest,
+ require => Package["jdk"],
+ }
+
+ # Secure (kerberos) datanodes need the setuid starter from hadoop-sbin.
+ if ($hadoop_security_authentication == "kerberos") {
+ package { "hadoop-sbin":
+ ensure => latest,
+ require => [Package["hadoop"], Yumrepo["Bigtop"]],
+ }
+ }
+
+ # NOTE(review): the `->` arrow orders the data-dir file resources AFTER
+ # the service is started — confirm this is intentional (daemons normally
+ # want their storage dirs to exist first).
+ # NOTE(review): `mode => 755` is an unquoted number; quote file modes
+ # ("0755") for forward compatibility with newer Puppet.
+ service { "hadoop-datanode":
+ ensure => running,
+ hasstatus => true,
+ subscribe => [Package["hadoop-datanode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]],
+ require => [ Package["hadoop-datanode"] ],
+ } -> file { $dirs:
+ ensure => directory,
+ owner => hdfs,
+ group => hdfs,
+ mode => 755,
+ require => [Package["hadoop"]],
+ }
+ }
+
+ # Creates one HDFS directory per resource title; owner/permission metadata
+ # is looked up from the $hdfs_dirs_meta hash keyed by $title.
+ define create_hdfs_dirs($hdfs_dirs_meta) {
+ $user = $hdfs_dirs_meta[$title][user]
+ $perm = $hdfs_dirs_meta[$title][perm]
+
+ exec { "HDFS init $title":
+ user => "hdfs",
+ command => "/bin/bash -c 'hadoop fs -mkdir $title && hadoop fs -chmod $perm $title && hadoop fs -chown $user $title'",
+ # NOTE(review): the guard uses $name while the command uses $title; for
+ # a defined type these are the same value unless a caller passes an
+ # explicit name — prefer one of the two consistently.
+ unless => "/bin/bash -c 'hadoop fs -ls $name >/dev/null 2>&1'",
+ require => [ Service["hadoop-namenode"], Exec["namenode format"] ],
+ }
+ }
+
+ # Namenode role: installs hadoop-namenode, formats the filesystem once,
+ # and keeps the daemon running.
+ define namenode ($jobtracker_host, $jobtracker_port, $host = $fqdn , $port = "8020", $thrift_port= "10090", $auth = "simple", $dirs = ["/tmp/nn"]) {
+
+ $hadoop_namenode_host = $host
+ $hadoop_namenode_port = $port
+ $hadoop_namenode_thrift_port = $thrift_port
+ $hadoop_jobtracker_host = $jobtracker_host
+ $hadoop_jobtracker_port = $jobtracker_port
+ $hadoop_security_authentication = $auth
+
+ include common
+
+ package { "hadoop-namenode":
+ ensure => latest,
+ require => Package["jdk"],
+ }
+
+ service { "hadoop-namenode":
+ ensure => running,
+ hasstatus => true,
+ subscribe => [Package["hadoop-namenode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]],
+ require => [Package["hadoop-namenode"], Exec["namenode format"]],
+ } -> file { $dirs:
+ ensure => directory,
+ owner => hdfs,
+ group => hdfs,
+ mode => 700,
+ }
+
+ # One-shot format, guarded by `creates` on the image directory of the
+ # first storage location. NOTE(review): $hadoop_storage_locations is not
+ # a parameter of this define — it must be set in an enclosing scope
+ # (node definition); verify it is always defined where this is used.
+ exec { "namenode format":
+ user => "hdfs",
+ command => "/bin/bash -c 'yes Y | hadoop namenode -format'",
+ creates => inline_template("<%= hadoop_storage_locations.split(';')[0] %>/namenode/image"),
+ require => [Package["hadoop-namenode"]],
+ }
+ }
+
+
+ # JobTracker role (MR1): installs and runs hadoop-jobtracker.
+ define jobtracker ($namenode_host, $namenode_port, $host = $fqdn, $port = "8021", $thrift_port = "9290", $auth = "simple", $dirs = ["/tmp/mr"]) {
+
+ $hadoop_namenode_host = $namenode_host
+ $hadoop_namenode_port = $namenode_port
+ $hadoop_jobtracker_thrift_port = $thrift_port
+ $hadoop_jobtracker_host = $host
+ $hadoop_jobtracker_port = $port
+ $hadoop_security_authentication = $auth
+
+ include common
+
+ package { "hadoop-jobtracker":
+ ensure => latest,
+ require => Package["jdk"],
+ }
+
+ service { "hadoop-jobtracker":
+ ensure => running,
+ hasstatus => true,
+ subscribe => [Package["hadoop-jobtracker"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/mapred-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]],
+ require => [ Package["hadoop-jobtracker"] ]
+ } -> file { $dirs:
+ ensure => directory,
+ owner => mapred,
+ group => mapred,
+ mode => 755,
+ require => [Package["hadoop"]],
+ }
+ }
+
+
+ # TaskTracker role (MR1): installs and runs hadoop-tasktracker; also
+ # manages taskcontroller.cfg used by the setuid LinuxTaskController.
+ define tasktracker ($namenode_host, $namenode_port, $jobtracker_host, $jobtracker_port, $auth = "simple", $dirs = ["/tmp/mr"]){
+
+ $hadoop_namenode_host = $namenode_host
+ $hadoop_namenode_port = $namenode_port
+ $hadoop_jobtracker_host = $jobtracker_host
+ $hadoop_jobtracker_port = $jobtracker_port
+ $hadoop_security_authentication = $auth
+
+ include common
+
+ package { "hadoop-tasktracker":
+ ensure => latest,
+ require => Package["jdk"],
+ }
+
+ file { "/etc/hadoop/conf/taskcontroller.cfg":
+ content => template('hadoop/taskcontroller.cfg'),
+ }
+
+ service { "hadoop-tasktracker":
+ ensure => running,
+ hasstatus => true,
+ subscribe => [Package["hadoop-tasktracker"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/mapred-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]],
+ require => [ Package["hadoop-tasktracker"], File["/etc/hadoop/conf/taskcontroller.cfg"] ],
+ } -> file { $dirs:
+ ensure => directory,
+ owner => mapred,
+ group => mapred,
+ mode => 755,
+ require => [Package["hadoop"]],
+ }
+ }
+
+
+ # Secondary namenode role: checkpointing daemon only; no local dirs managed.
+ define secondarynamenode ($namenode_host, $namenode_port, $port = "50090", $auth = "simple") {
+
+ $hadoop_secondarynamenode_port = $port
+ $hadoop_security_authentication = $auth
+ include common
+
+ package { "hadoop-secondarynamenode":
+ ensure => latest,
+ require => Package["jdk"],
+ }
+
+ service { "hadoop-secondarynamenode":
+ ensure => running,
+ hasstatus => true,
+ subscribe => [Package["hadoop-secondarynamenode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]],
+ require => [Package["hadoop-secondarynamenode"]],
+ }
+ }
+
+ # Client role: config files via `common` plus developer/client packages;
+ # starts no services.
+ define client ($namenode_host, $namenode_port, $jobtracker_host, $jobtracker_port, $auth = "simple") {
+ $hadoop_namenode_host = $namenode_host
+ $hadoop_namenode_port = $namenode_port
+ $hadoop_jobtracker_host = $jobtracker_host
+ $hadoop_jobtracker_port = $jobtracker_port
+ $hadoop_security_authentication = $auth
+
+ include common
+
+ package { ["hadoop-doc", "hadoop-source", "hadoop-debuginfo",
+ "hadoop-fuse", "hadoop-libhdfs", "hadoop-pipes"]:
+ ensure => latest,
+ require => [Package["jdk"], Package["hadoop"]],
+ }
+ }
+}
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/core-site.xml Tue Nov 1 21:53:04 2011
@@ -0,0 +1,89 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more -->
+<!-- contributor license agreements. See the NOTICE file distributed with -->
+<!-- this work for additional information regarding copyright ownership. -->
+<!-- The ASF licenses this file to You under the Apache License, Version 2.0 -->
+<!-- (the "License"); you may not use this file except in compliance with -->
+<!-- the License. You may obtain a copy of the License at -->
+<!-- -->
+<!-- http://www.apache.org/licenses/LICENSE-2.0 -->
+<!-- -->
+<!-- Unless required by applicable law or agreed to in writing, software -->
+<!-- distributed under the License is distributed on an "AS IS" BASIS, -->
+<!-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -->
+<!-- See the License for the specific language governing permissions and -->
+<!-- limitations under the License. -->
+
+<configuration>
+
+ <property>
+ <!-- URI of NN. Fully qualified. No IP.-->
+ <name>fs.default.name</name>
+ <value>hdfs://<%= hadoop_namenode_host %>:<%= hadoop_namenode_port%></value>
+ </property>
+
+ <property>
+ <name>hadoop.security.authentication</name>
+ <value><%= hadoop_security_authentication %></value>
+ </property>
+
+<% if hadoop_security_authentication == "kerberos" %>
+ <property>
+ <name>hadoop.security.authorization</name>
+ <value>true</value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_security_group_mapping") %>
+ <property>
+ <name>hadoop.security.group.mapping</name>
+ <value><%= hadoop_security_group_mapping %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_core_proxyusers") %>
+<% hadoop_core_proxyusers.each do |superuser, data| %>
+ <property>
+ <name>hadoop.proxyuser.<%= superuser %>.hosts</name>
+ <value><%= data['hosts'] %></value>
+ </property>
+ <property>
+ <name>hadoop.proxyuser.<%= superuser %>.groups</name>
+ <value><%= data['groups'] %></value>
+ </property>
+<% end %>
+
+<% end %>
+ <!-- NOTE(review): dfs.permissions is an HDFS property and conventionally -->
+ <!-- belongs in hdfs-site.xml, not core-site.xml; it also disables HDFS -->
+ <!-- permission checking cluster-wide - confirm this is intended even for -->
+ <!-- kerberos-enabled deployments. -->
+ <property>
+ <name>dfs.permissions</name>
+ <value>false</value>
+ </property>
+
+<% if has_variable?("hadoop_snappy_codec") %>
+ <property>
+ <name>io.compression.codecs</name>
+ <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec</value>
+ </property>
+ <!-- property>
+ <name>io.compression.codec.lzo.class</name>
+ <value>com.hadoop.compression.lzo.LzoCodec</value>
+ </property-->
+<% end %>
+
+<% if has_variable?("hadoop_config_fs_inmemory_size_mb") %>
+ <property>
+ <name>fs.inmemory.size.mb</name>
+ <value><%= hadoop_config_fs_inmemory_size_mb %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_io_file_buffer_size") %>
+ <property>
+ <name>io.file.buffer.size</name>
+ <value><%= hadoop_config_io_file_buffer_size %></value>
+ </property>
+<% end %>
+
+</configuration>
Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop (from r1196246, incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build&r1=1196246&r2=1196308&rev=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop Tue Nov 1 21:53:04 2011
@@ -1,4 +1,3 @@
-#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
@@ -12,10 +11,18 @@
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-ant -Djavac.version=1.6 -Djava5.home=${JAVA5_HOME} -Dforrest.home=${FORREST_HOME} -Ddist.dir=debian/tmp -Dversion=${PIG_BASE_VERSION} package "$@"
-
-
+export HADOOP_HOME_WARN_SUPPRESS=true
+export HADOOP_HOME=/usr/lib/hadoop
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_LOG_DIR=/var/log/hadoop
+export HADOOP_NAMENODE_USER=hdfs
+export HADOOP_SECONDARYNAMENODE_USER=hdfs
+export HADOOP_DATANODE_USER=hdfs
+<% if hadoop_security_authentication == "kerberos" %>
+export HADOOP_SECURE_DN_USER=hdfs
+export HADOOP_SECURE_DN_PID_DIR=$HADOOP_PID_DIR
+export HADOOP_SECURE_DN_LOG_DIR=$HADOOP_LOG_DIR
+<% end %>
+export HADOOP_JOBTRACKER_USER=mapred
+export HADOOP_TASKTRACKER_USER=mapred
+export HADOOP_IDENT_STRING=hadoop
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop-env.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop-env.sh?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop-env.sh (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hadoop-env.sh Tue Nov 1 21:53:04 2011
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+<% def shell_config(shell_var, *puppet_var)
+ # Emits "export SHELL_VAR=value" when the corresponding Puppet variable
+ # is set in scope, otherwise a commented-out "#export SHELL_VAR="
+ # placeholder so the generated file documents the available knobs.
+ # The Puppet variable name defaults to the shell name downcased and can
+ # be overridden via the optional second argument.
+ puppet_var = puppet_var[0] || shell_var.downcase
+ if has_variable? puppet_var
+ return "export #{shell_var}=#{scope.lookupvar(puppet_var)}"
+ else
+ return "#export #{shell_var}="
+ end
+ end %>
+# WARNING: Heavy puppet machinery is involved managing this file,
+# your edits stand no chance
+#
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME. All others are
+# optional. When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use. Required.
+<%= shell_config("JAVA_HOME", "hadoop_java_home") %>
+
+# Extra Java CLASSPATH elements. Optional.
+<%= shell_config("HADOOP_CLASSPATH") %>
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+<%= shell_config("HADOOP_HEAPSIZE") %>
+
+# Extra Java runtime options. Empty by default.
+<%= shell_config("HADOOP_OPTS") %>
+
+# Command specific options appended to HADOOP_OPTS when specified
+<%= shell_config("HADOOP_NAMENODE_OPTS") %>
+<%= shell_config("HADOOP_SECONDARYNAMENODE_OPTS") %>
+<%= shell_config("HADOOP_DATANODE_OPTS") %>
+<%= shell_config("HADOOP_BALANCER_OPTS") %>
+<%= shell_config("HADOOP_JOBTRACKER_OPTS") %>
+<%= shell_config("HADOOP_TASKTRACKER_OPTS") %>
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+<%= shell_config("HADOOP_CLIENT_OPTS") %>
+
+# Extra ssh options. Empty by default.
+<%= shell_config("HADOOP_SSH_OPTS") %>
+
+# Where log files are stored. $HADOOP_HOME/logs by default.
+<%= shell_config("HADOOP_LOG_DIR") %>
+
+# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.
+<%= shell_config("HADOOP_SLAVES") %>
+
+# host:path where hadoop code should be rsync'd from. Unset by default.
+<%= shell_config("HADOOP_MASTER") %>
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+<%= shell_config("HADOOP_SLAVE_SLEEP") %>
+
+# The directory where pid files are stored. /tmp by default.
+<%= shell_config("HADOOP_PID_DIR") %>
+
+# A string representing this instance of hadoop. $USER by default.
+<%= shell_config("HADOOP_IDENT_STRING") %>
+
+# The scheduling priority for daemon processes. See 'man nice'.
+<%= shell_config("HADOOP_NICENESS") %>
+
+### WARNING: the following is NOT really optional. It is a shame that stock Hadoop
+### hadoop_env.sh doesn't make it clear -- you can NOT turn com.sun.management.jmxremote off
+### and have a working Hadoop cluster.
+export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
+export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
+export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml Tue Nov 1 21:53:04 2011
@@ -0,0 +1,165 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more -->
+<!-- contributor license agreements. See the NOTICE file distributed with -->
+<!-- this work for additional information regarding copyright ownership. -->
+<!-- The ASF licenses this file to You under the Apache License, Version 2.0 -->
+<!-- (the "License"); you may not use this file except in compliance with -->
+<!-- the License. You may obtain a copy of the License at -->
+<!-- -->
+<!-- http://www.apache.org/licenses/LICENSE-2.0 -->
+<!-- -->
+<!-- Unless required by applicable law or agreed to in writing, software -->
+<!-- distributed under the License is distributed on an "AS IS" BASIS, -->
+<!-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -->
+<!-- See the License for the specific language governing permissions and -->
+<!-- limitations under the License. -->
+
+<configuration>
+
+<% if hadoop_security_authentication == "kerberos" %>
+ <property>
+ <name>dfs.block.access.token.enable</name>
+ <value>true</value>
+ </property>
+
+ <!-- NameNode security config -->
+ <property>
+ <name>dfs.https.address</name>
+ <value><%= hadoop_namenode_host %>:50475</value>
+ </property>
+ <property>
+ <name>dfs.https.port</name>
+ <value>50475</value>
+ </property>
+ <property>
+ <name>dfs.namenode.keytab.file</name>
+ <value>/etc/hdfs.keytab</value> <!-- path to the HDFS keytab -->
+ </property>
+ <property>
+ <name>dfs.namenode.kerberos.principal</name>
+ <value>hdfs/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>dfs.namenode.kerberos.https.principal</name>
+ <value>host/_HOST@<%= kerberos_realm %></value>
+ </property>
+
+ <!-- Secondary NameNode security config -->
+ <property>
+ <name>dfs.secondary.http.address</name>
+ <value><%= hadoop_namenode_host %>:0</value>
+ </property>
+ <property>
+ <name>dfs.secondary.https.address</name>
+ <value><%= hadoop_namenode_host %>:50495</value>
+ </property>
+ <property>
+ <name>dfs.secondary.https.port</name>
+ <value>50495</value>
+ </property>
+ <property>
+ <name>dfs.secondary.namenode.keytab.file</name>
+ <value>/etc/hdfs.keytab</value> <!-- path to the HDFS keytab -->
+ </property>
+ <property>
+ <name>dfs.secondary.namenode.kerberos.principal</name>
+ <value>hdfs/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>dfs.secondary.namenode.kerberos.https.principal</name>
+ <value>host/_HOST@<%= kerberos_realm %></value>
+ </property>
+
+ <!-- DataNode security config -->
+ <property>
+ <name>dfs.datanode.data.dir.perm</name>
+ <value>700</value>
+ </property>
+ <property>
+ <name>dfs.datanode.address</name>
+ <value>0.0.0.0:1004</value>
+ </property>
+ <property>
+ <name>dfs.datanode.http.address</name>
+ <value>0.0.0.0:1006</value>
+ </property>
+ <property>
+ <name>dfs.datanode.keytab.file</name>
+ <value>/etc/hdfs.keytab</value> <!-- path to the HDFS keytab -->
+ </property>
+ <property>
+ <name>dfs.datanode.kerberos.principal</name>
+ <value>hdfs/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>dfs.datanode.kerberos.https.principal</name>
+ <value>host/_HOST@<%= kerberos_realm %></value>
+ </property>
+<% end %>
+
+ <!-- name node -->
+ <!-- NOTE(review): fs.default.name is also set in core-site.xml from the -->
+ <!-- same variables; the copy here is redundant - consider keeping it in -->
+ <!-- core-site.xml only. -->
+ <property>
+ <!-- URI of NN. Fully qualified. No IP.-->
+ <name>fs.default.name</name>
+ <value>hdfs://<%= hadoop_namenode_host %>:<%= hadoop_namenode_port%></value>
+ </property>
+
+ <!-- NOTE(review): the ERB loops below leave a trailing comma after the -->
+ <!-- last directory entry - TODO confirm Hadoop's configuration parser -->
+ <!-- ignores the resulting empty list element. -->
+ <property>
+ <name>dfs.data.dir</name>
+ <value><% hadoop_storage_locations.split(";").each do |storage_location| %><%= storage_location%>/hdfs,<% end %></value>
+ </property>
+
+ <property>
+ <name>dfs.name.dir</name>
+ <value><% hadoop_storage_locations.split(";").each do |storage_location| %><%= storage_location%>/namenode,<% end %></value>
+ </property>
+
+ <!-- Enable Hue plugins -->
+<% if has_variable?("hadoop_dfs_namenode_plugins") %>
+ <property>
+ <name>dfs.namenode.plugins</name>
+ <value><%= hadoop_dfs_namenode_plugins %></value>
+ <description>Comma-separated list of namenode plug-ins to be activated.
+ </description>
+ </property>
+<% end %>
+<% if has_variable?("hadoop_dfs_datanode_plugins") %>
+ <property>
+ <name>dfs.datanode.plugins</name>
+ <value><%= hadoop_dfs_datanode_plugins %></value>
+ <description>Comma-separated list of datanode plug-ins to be activated.
+ </description>
+ </property>
+<% end %>
+<% if has_variable?("hadoop_namenode_thrift_port") %>
+ <property>
+ <name>dfs.thrift.address</name>
+ <value>0.0.0.0:<%= hadoop_namenode_thrift_port %></value>
+ </property>
+<% end %>
+
+ <!-- increase the number of datanode transceivers way above the default of 256
+ - this is for hbase -->
+ <property>
+ <name>dfs.datanode.max.xcievers</name>
+ <value>4096</value>
+ </property>
+
+ <!-- Configurations for large cluster -->
+<% if has_variable?("hadoop_config_dfs_block_size") %>
+ <property>
+ <name>dfs.block.size</name>
+ <value><%= hadoop_config_dfs_block_size %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_namenode_handler_count") %>
+ <property>
+ <name>dfs.namenode.handler.count</name>
+ <value><%= hadoop_config_namenode_handler_count %></value>
+ </property>
+<% end %>
+
+</configuration>
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/mapred-site.xml
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/mapred-site.xml?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/mapred-site.xml (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/mapred-site.xml Tue Nov 1 21:53:04 2011
@@ -0,0 +1,290 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more -->
+<!-- contributor license agreements. See the NOTICE file distributed with -->
+<!-- this work for additional information regarding copyright ownership. -->
+<!-- The ASF licenses this file to You under the Apache License, Version 2.0 -->
+<!-- (the "License"); you may not use this file except in compliance with -->
+<!-- the License. You may obtain a copy of the License at -->
+<!-- -->
+<!-- http://www.apache.org/licenses/LICENSE-2.0 -->
+<!-- -->
+<!-- Unless required by applicable law or agreed to in writing, software -->
+<!-- distributed under the License is distributed on an "AS IS" BASIS, -->
+<!-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -->
+<!-- See the License for the specific language governing permissions and -->
+<!-- limitations under the License. -->
+
+<configuration>
+
+<% if hadoop_security_authentication == "kerberos" %>
+ <!-- JobTracker security configs -->
+ <property>
+ <name>mapreduce.jobtracker.kerberos.principal</name>
+ <value>mapred/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>mapreduce.jobtracker.kerberos.https.principal</name>
+ <value>host/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>mapreduce.jobtracker.keytab.file</name>
+ <value>/etc/mapred.keytab</value> <!-- path to the MapReduce keytab -->
+ </property>
+
+ <!-- TaskTracker security configs -->
+ <property>
+ <name>mapreduce.tasktracker.kerberos.principal</name>
+ <value>mapred/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>mapreduce.tasktracker.kerberos.https.principal</name>
+ <value>host/_HOST@<%= kerberos_realm %></value>
+ </property>
+ <property>
+ <name>mapreduce.tasktracker.keytab.file</name>
+ <value>/etc/mapred.keytab</value> <!-- path to the MapReduce keytab -->
+ </property>
+
+ <!-- TaskController settings -->
+ <property>
+ <name>mapred.task.tracker.task-controller</name>
+ <value>org.apache.hadoop.mapred.LinuxTaskController</value>
+ </property>
+ <property>
+ <name>mapreduce.tasktracker.group</name>
+ <value>mapred</value>
+ </property>
+<% end %>
+
+<% if has_variable?("mapred_acls_enabled") %>
+ <property>
+ <name>mapred.acls.enabled</name>
+ <value><%= mapred_acls_enabled %></value>
+ </property>
+<% end %>
+
+<!-- specify JobTracker TaskScheduler -->
+<% if has_variable?("hadoop_jobtracker_taskscheduler") %>
+ <property>
+ <name>mapred.jobtracker.taskScheduler</name>
+ <value><%= hadoop_jobtracker_taskscheduler %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_mapred_fairscheduler_assignmultiple") %>
+ <property>
+ <name>mapred.fairscheduler.assignmultiple</name>
+ <value><%= hadoop_config_mapred_fairscheduler_assignmultiple %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_mapred_fairscheduler_sizebasedweight") %>
+ <property>
+ <name>mapred.fairscheduler.sizebasedweight</name>
+ <!-- FIX: value previously interpolated
+ hadoop_config_mapred_fairscheduler_assignmultiple (copy-paste from the
+ assignmultiple block above), so sizebasedweight silently received the
+ wrong setting; it must use the variable this guard checks for. -->
+ <value><%= hadoop_config_mapred_fairscheduler_sizebasedweight %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_jobtracker_fairscheduler_weightadjuster") %>
+ <property>
+ <name>mapred.fairscheduler.weightadjuster</name>
+ <value><%= hadoop_jobtracker_fairscheduler_weightadjuster %></value>
+ </property>
+<% end %>
+
+ <property>
+ <name>mapred.job.tracker</name>
+ <value><%= hadoop_jobtracker_host %>:<%= hadoop_jobtracker_port%></value>
+ </property>
+
+ <property>
+ <name>mapred.local.dir</name>
+ <value><% hadoop_storage_locations.split(";").each do |storage_location| %><%= storage_location%>/scratch,<% end %></value>
+ <final>true</final>
+ </property>
+
+ <property>
+ <name>mapred.child.java.opts</name>
+ <value>-Xmx1024m</value>
+ </property>
+
+ <property>
+ <name>mapreduce.jobtracker.staging.root.dir</name>
+ <value>/user</value>
+ </property>
+
+ <property>
+ <name>mapred.system.dir</name>
+ <value>/mapred/system</value>
+ </property>
+
+<% if has_variable?("hadoop_config_mapred_child_ulimit") %>
+ <property>
+ <!-- set this to ~1.5x the heap size in mapred.child.java.opts -->
+ <name>mapred.child.ulimit</name>
+ <value><%= hadoop_config_mapred_child_ulimit %></value>
+ </property>
+<% else %>
+ <property>
+ <!-- set this to ~1.5x the heap size in mapred.child.java.opts -->
+ <name>mapred.child.ulimit</name>
+ <value>unlimited</value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_io_sort_mb") %>
+ <property>
+ <name>io.sort.mb</name>
+ <value><%= hadoop_config_io_sort_mb %></value>
+ </property>
+<% else %>
+ <property>
+ <name>io.sort.mb</name>
+ <value>256</value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_io_sort_factor") %>
+ <property>
+ <name>io.sort.factor</name>
+ <value><%= hadoop_config_io_sort_factor %></value>
+ </property>
+<% else %>
+ <property>
+ <name>io.sort.factor</name>
+ <value>64</value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_mapred_job_tracker_handler_count") %>
+ <property>
+ <name>mapred.job.tracker.handler.count</name>
+ <value><%= hadoop_config_mapred_job_tracker_handler_count %></value>
+ <final>true</final>
+ </property>
+<% else %>
+ <property>
+ <name>mapred.job.tracker.handler.count</name>
+ <value>10</value>
+ <final>true</final>
+ </property>
+<% end %>
+
+ <property>
+ <name>mapred.map.tasks.speculative.execution</name>
+ <value>true</value>
+ </property>
+
+<% if has_variable?("hadoop_config_mapred_reduce_parallel_copies") %>
+ <property>
+ <name>mapred.reduce.parallel.copies</name>
+ <!-- set this to somewhere between sqrt(nodes) and nodes/2.
+ for <20 nodes, set == |nodes| -->
+ <value><%= hadoop_config_mapred_reduce_parallel_copies %></value>
+ </property>
+<% else %>
+ <property>
+ <name>mapred.reduce.parallel.copies</name>
+ <!-- set this to somewhere between sqrt(nodes) and nodes/2.
+ for <20 nodes, set == |nodes| -->
+ <value>5</value>
+ </property>
+<% end %>
+
+ <property>
+ <name>mapred.reduce.tasks</name>
+ <!-- set to numnodes * mapred.tasktracker.reduce.tasks.maximum -->
+ <value>30</value>
+ </property>
+
+<% if has_variable?("hadoop_config_mapred_reduce_tasks_speculative_execution") %>
+ <property>
+ <name>mapred.reduce.tasks.speculative.execution</name>
+ <value><%= hadoop_config_mapred_reduce_tasks_speculative_execution %></value>
+ </property>
+<% else %>
+ <property>
+ <name>mapred.reduce.tasks.speculative.execution</name>
+ <value>false</value>
+ </property>
+<% end %>
+
+ <property>
+ <name>mapred.tasktracker.map.tasks.maximum</name>
+ <!-- see other kb entry about this one. -->
+ <value><%= [1, processorcount.to_i * 0.80].max.round %></value>
+ <final>true</final>
+ </property>
+
+ <property>
+ <name>mapred.tasktracker.reduce.tasks.maximum</name>
+ <!-- see other kb entry about this one. -->
+ <value><%= [1, processorcount.to_i * 0.20].max.round %></value>
+ <final>true</final>
+ </property>
+
+<% if has_variable?("hadoop_config_tasktracker_http_threads") %>
+ <property>
+ <name>tasktracker.http.threads</name>
+ <value><%= hadoop_config_tasktracker_http_threads %></value>
+ <final>true</final>
+ </property>
+<% else %>
+ <property>
+ <name>tasktracker.http.threads</name>
+ <value>60</value>
+ <final>true</final>
+ </property>
+<% end %>
+
+ <property>
+ <name>mapred.output.compression.type</name>
+ <value>BLOCK</value>
+ <description>If the job outputs are to compressed as
+ SequenceFiles, how should they be compressed? Should be one of
+ NONE, RECORD or BLOCK.</description>
+ </property>
+
+<% if has_variable?("hadoop_config_use_compression") %>
+ <property>
+ <name>mapred.compress.map.output</name>
+ <value><%= hadoop_config_use_compression %></value>
+ </property>
+<% else %>
+ <property>
+ <name>mapred.compress.map.output</name>
+ <value>false</value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_config_mapred_reduce_slowstart_completed_maps") %>
+ <property>
+ <name>mapred.reduce.slowstart.completed.maps</name>
+ <value><%= hadoop_config_mapred_reduce_slowstart_completed_maps %></value>
+ </property>
+<% end %>
+
+<% if has_variable?("hadoop_jobtracker_thrift_port") %>
+ <!-- Enable Hue plugins -->
+ <property>
+ <name>jobtracker.thrift.address</name>
+ <value>0.0.0.0:<%= hadoop_jobtracker_thrift_port %></value>
+ </property>
+<% end %>
+<% if has_variable?("hadoop_mapred_jobtracker_plugins") %>
+ <property>
+ <name>mapred.jobtracker.plugins</name>
+ <value><%= hadoop_mapred_jobtracker_plugins %></value>
+ <description>Comma-separated list of jobtracker plug-ins to be activated.</description>
+ </property>
+<% end %>
+<% if has_variable?("hadoop_mapred_tasktracker_plugins") %>
+ <property>
+ <name>mapred.tasktracker.instrumentation</name>
+ <value><%= hadoop_mapred_tasktracker_plugins %></value>
+ </property>
+<% end %>
+
+</configuration>
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/taskcontroller.cfg
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/taskcontroller.cfg?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/taskcontroller.cfg (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/hadoop/templates/taskcontroller.cfg Tue Nov 1 21:53:04 2011
@@ -0,0 +1,5 @@
+mapred.local.dir=<% hadoop_storage_locations.split(";").each do |storage_location| %><%= storage_location%>/scratch,<% end %>
+hadoop.log.dir=/var/log/hadoop/
+mapreduce.tasktracker.group=mapred
+min.user.id=0
+banned.users=foo
Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/lib/facter/kadm_keytab.rb (from r1196246, incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/lib/facter/kadm_keytab.rb?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/lib/facter/kadm_keytab.rb&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build&r1=1196246&r2=1196308&rev=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/lib/facter/kadm_keytab.rb Tue Nov 1 21:53:04 2011
@@ -1,4 +1,3 @@
-#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
@@ -14,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-set -ex
-
-ant -f src/build.xml package "$@"
+require 'facter'
+Facter.add("kadm_keytab") do
+ setcode do
+ %x{[ -f /etc/kadm5.keytab ] && base64 </etc/kadm5.keytab 2>/dev/null} + "\n"
+ end
+end
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/manifests/init.pp
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/manifests/init.pp?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/manifests/init.pp (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/manifests/init.pp Tue Nov 1 21:53:04 2011
@@ -0,0 +1,157 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class kerberos {
+ class site {
+ # The following is our interface to the world. This is what we allow
+ # users to tweak from the outside (see tests/init.pp for a complete
+ # example) before instantiating target classes.
+ # Once we migrate to Puppet 2.6 we can potentially start using
+ # parametrized classes instead.
+ $domain = $kerberos_domain ? { '' => inline_template('<%= domain %>'),
+ default => $kerberos_domain }
+ $realm = $kerberos_realm ? { '' => inline_template('<%= domain.upcase %>'),
+ default => $kerberos_realm }
+ $kdc_server = $kerberos_kdc_server ? { '' => 'localhost',
+ default => $kerberos_kdc_server }
+ $kdc_port = $kerberos_kdc_port ? { '' => '88',
+ default => $kerberos_kdc_port }
+ $admin_port = 749 /* BUG: linux daemon packaging doesn't let us tweak this */
+
+ case $operatingsystem {
+ 'ubuntu': {
+ $package_name_kdc = 'krb5-kdc'
+ $service_name_kdc = 'krb5-kdc'
+ $package_name_admin = 'krb5-admin-server'
+ $service_name_admin = 'krb5-admin-server'
+ $package_name_client = 'krb5-user'
+ $exec_path = '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
+ $kdc_etc_path = '/etc/krb5kdc/'
+ }
+ # default assumes CentOS, Redhat 5 series (just look at how random it all looks :-()
+ default: {
+ $package_name_kdc = 'krb5-server'
+ $service_name_kdc = 'krb5kdc'
+ $package_name_admin = 'krb5-libs'
+ $service_name_admin = 'kadmin'
+ $package_name_client = 'krb5-workstation'
+ $exec_path = '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/kerberos/sbin:/usr/kerberos/bin'
+ $kdc_etc_path = '/var/kerberos/krb5kdc/'
+ }
+ }
+
+ file { "/etc/krb5.conf":
+ content => template('kerberos/krb5.conf'),
+ owner => "root",
+ group => "root",
+ mode => "0644",
+ }
+ }
+
+ class kdc inherits kerberos::site {
+ package { "$package_name_kdc":
+ ensure => installed,
+ }
+
+ file { "$kdc_etc_path":
+ ensure => directory,
+ owner => root,
+ group => root,
+ mode => "0700",
+ }
+ file { "${kdc_etc_path}/kdc.conf":
+ content => template('kerberos/kdc.conf'),
+ require => Package["$package_name_kdc"],
+ owner => "root",
+ group => "root",
+ mode => "0644",
+ }
+ file { "${kdc_etc_path}/kadm5.acl":
+ content => template('kerberos/kadm5.acl'),
+ require => Package["$package_name_kdc"],
+ owner => "root",
+ group => "root",
+ mode => "0644",
+ }
+
+ exec { "kdb5_util":
+ path => $exec_path,
+ command => "rm -f /etc/kadm5.keytab ; kdb5_util -P cthulhu -r ${realm} create -s && kadmin.local -q 'cpw -pw secure kadmin/admin'",
+
+ creates => "${kdc_etc_path}/stash",
+
+ subscribe => File["${kdc_etc_path}/kdc.conf"],
+ # refreshonly => true,
+
+ require => [Package["$package_name_kdc"], File["${kdc_etc_path}/kdc.conf"], File["/etc/krb5.conf"]],
+ }
+
+ service { "$service_name_kdc":
+ ensure => running,
+ require => [Package["$package_name_kdc"], File["${kdc_etc_path}/kdc.conf"], Exec["kdb5_util"]],
+ subscribe => File["${kdc_etc_path}/kdc.conf"],
+ hasrestart => true,
+ }
+
+
+ class admin_server inherits kerberos::kdc {
+ /* BUG: KITCHEN-751 */
+ $se_hack = "setsebool -P kadmind_disable_trans 1 ; setsebool -P krb5kdc_disable_trans 1"
+
+ package { "$package_name_admin":
+ ensure => installed,
+ require => Package["$package_name_kdc"],
+ }
+
+ service { "$service_name_admin":
+ ensure => running,
+ require => [Package["$package_name_admin"], Service["$service_name_kdc"]],
+ hasrestart => true,
+ restart => "${se_hack} ; service ${service_name_admin} restart",
+ start => "${se_hack} ; service ${service_name_admin} start",
+ }
+ }
+ }
+
+ class client inherits kerberos::site {
+ define create_princs {
+ exec { "addprinc.$title":
+ path => $kerberos::site::exec_path, # BUG: I really shouldn't need to do a FQVN here
+ command => "kadmin -w secure -p kadmin/admin -q 'addprinc -randkey $title/$fqdn'",
+ unless => "kadmin -w secure -p kadmin/admin -q listprincs | grep -q $title/$fqdn"
+ }
+ }
+
+ define host_keytab($fqdn = "$hostname.$domain", $princs_map) {
+ $princs = $princs_map[$title]
+ $keytab = "/etc/${title}.keytab"
+ $exports = inline_template("<%= princs.join('/$fqdn ') + '/$fqdn ' %>")
+
+ create_princs { $princs:
+ }
+
+ exec { "xst.$title":
+ path => $kerberos::site::exec_path, # BUG: I really shouldn't need to do a FQVN here
+ command => "kadmin -w secure -p kadmin/admin -q 'xst -k $keytab $exports' ; chown $title $keytab",
+ unless => "klist -kt $keytab 2>/dev/null | grep -q $title/$fqdn",
+ require => [ Create_princs[$princs] ],
+ }
+ }
+
+ package { "$package_name_client":
+ ensure => installed,
+ }
+ }
+}
Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kadm5.acl (from r1196246, incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kadm5.acl?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kadm5.acl&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build&r1=1196246&r2=1196308&rev=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kadm5.acl Tue Nov 1 21:53:04 2011
@@ -1,4 +1,3 @@
-#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
@@ -14,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-set -ex
-
-ant -f src/build.xml package "$@"
+# This file is the access control list for krb5 administration.
+# When this file is edited, run /etc/init.d/krb5-admin-server restart to activate it
+# One common way to set up Kerberos administration is to allow any principal
+# ending in /admin full administrative rights.
+# To enable this, uncomment the following line:
+*/admin *
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kdc.conf
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kdc.conf?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kdc.conf (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/kdc.conf Tue Nov 1 21:53:04 2011
@@ -0,0 +1,20 @@
+default_realm = <%= realm %>
+
+[kdcdefaults]
+ v4_mode = nopreauth
+ kdc_ports = 0
+
+[realms]
+ <%= realm %> = {
+ kdc_ports = <%= kdc_port %>
+ admin_keytab = /etc/kadm5.keytab
+ database_name = <%= kdc_etc_path %>/principal
+ acl_file = <%= kdc_etc_path %>/kadm5.acl
+ key_stash_file = <%= kdc_etc_path %>/stash
+ max_life = 10h 0m 0s
+ max_renewable_life = 7d 0h 0m 0s
+ master_key_type = des3-hmac-sha1
+ supported_enctypes = arcfour-hmac:normal des3-hmac-sha1:normal des-cbc-crc:normal des:normal des:v4 des:norealm des:onlyrealm des:afs3
+# supported_enctypes = des3-hmac-sha1:normal arcfour-hmac:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal des-cbc-crc:v4 des-cbc-crc:afs3
+ default_principal_flags = +preauth
+ }
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/krb5.conf
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/krb5.conf?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/krb5.conf (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/templates/krb5.conf Tue Nov 1 21:53:04 2011
@@ -0,0 +1,26 @@
+[libdefaults]
+ default_realm = <%= realm %>
+ dns_lookup_realm = false
+ dns_lookup_kdc = false
+ ticket_lifetime = 24h
+ forwardable = true
+ udp_preference_limit = 1000000
+ default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+ default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+ permitted_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+
+[realms]
+ <%= realm %> = {
+ kdc = <%= kdc_server %>:<%= kdc_port %>
+ admin_server = <%= kdc_server %>:<%= admin_port %>
+ default_domain = <%= domain %>
+ }
+
+[domain_realm]
+ .<%= domain %> = <%= realm %>
+ <%= domain %> = <%= realm %>
+
+[logging]
+ kdc = FILE:/var/log/krb5kdc.log
+ admin_server = FILE:/var/log/kadmin.log
+ default = FILE:/var/log/krb5lib.log
Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/tests/init.pp (from r1196246, incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/tests/init.pp?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/tests/init.pp&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control&r1=1196246&r2=1196308&rev=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-deploy/puppet/modules/kerberos/tests/init.pp Tue Nov 1 21:53:04 2011
@@ -12,22 +12,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-Source: whirr
-Section: misc
-Priority: extra
-Maintainer: Tom White <to...@cloudera.com>
-Build-Depends: debhelper (>= 6), git-core
-Standards-Version: 3.8.0
-Homepage: http://incubator.apache.org/whirr
-Package: whirr
-Architecture: all
-Description: Scripts and libraries for running software services on cloud infrastructure
- Whirr provides
- .
- * A cloud-neutral way to run services. You don't have to worry about the
- idiosyncrasies of each provider.
- * A common service API. The details of provisioning are particular to the
- service.
- * Smart defaults for services. You can get a properly configured system
- running quickly, while still being able to override settings as needed.
+$kerberos_domain = "krb.test.com"
+$kerberos_realm = "KRB.TEST.COM"
+$kerberos_kdc_server = "localhost"
+$kerberos_kdc_port = 88
+# the following turns a node into a fully functional KDC
+include kerberos::kdc
+# the following opens up the KDC principal database for remote
+# administration (it really should be optional, but it is
+# required for now in order to make kerberos::client::host_keytab
+# work)
+include kerberos::kdc::admin_server
+
+# the following turns a node into a Kerberos client host with...
+include kerberos::client
+# ...an optional host_keytab for as many services as you want:
+kerberos::client::host_keytab { ["host", "hdfs", "mapred"]: }
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/LICENSE
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/LICENSE?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/LICENSE (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/LICENSE Tue Nov 1 21:53:04 2011
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
Copied: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/bigtop-detect-javahome (from r1196246, incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control)
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/bigtop-detect-javahome?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/bigtop-detect-javahome&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control&r1=1196246&r2=1196308&rev=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/whirr/control (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/bigtop-utils/bigtop-detect-javahome Tue Nov 1 21:53:04 2011
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
@@ -12,22 +13,32 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-Source: whirr
-Section: misc
-Priority: extra
-Maintainer: Tom White <to...@cloudera.com>
-Build-Depends: debhelper (>= 6), git-core
-Standards-Version: 3.8.0
-Homepage: http://incubator.apache.org/whirr
-Package: whirr
-Architecture: all
-Description: Scripts and libraries for running software services on cloud infrastructure
- Whirr provides
- .
- * A cloud-neutral way to run services. You don't have to worry about the
- idiosyncrasies of each provider.
- * A common service API. The details of provisioning are particular to the
- service.
- * Smart defaults for services. You can get a properly configured system
- running quickly, while still being able to override settings as needed.
+
+# Uncomment and set the following variable in order for the
+# guesswork step to be skipped
+#
+# JAVA_HOME=/usr/java/latest
+
+# attempt to find java
+if [ -z "$JAVA_HOME" ]; then
+ for candidate in \
+ /usr/lib/jvm/java-6-sun \
+ /usr/lib/jvm/java-1.6.0-sun-1.6.0.*/jre/ \
+ /usr/lib/jvm/java-1.6.0-sun-1.6.0.* \
+ /usr/lib/j2sdk1.6-sun \
+ /usr/java/jdk1.6* \
+ /usr/java/jre1.6* \
+ /Library/Java/Home \
+ /usr/java/default \
+ /usr/lib/jvm/default-java \
+ /usr/lib/jvm/java-openjdk \
+ /usr/lib/jvm/jre-openjdk \
+ /usr/lib/jvm/java-1.6.0-openjdk-1.6.* \
+ /usr/lib/jvm/jre-1.6.0-openjdk* ; do
+ if [ -e $candidate/bin/java ]; then
+ export JAVA_HOME=$candidate
+ break
+ fi
+ done
+fi
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/flume/install_flume.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/flume/install_flume.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/flume/install_flume.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/flume/install_flume.sh Tue Nov 1 21:53:04 2011
@@ -121,6 +121,13 @@ mkdir -p `dirname $wrapper`
cat > $wrapper <<EOF
#!/bin/sh
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
exec /usr/lib/flume/bin/flume "\$@"
EOF
chmod 755 $wrapper
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/do-component-build?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/do-component-build Tue Nov 1 21:53:04 2011
@@ -17,3 +17,4 @@
set -ex
mvn clean -DskipTests -Dhbase.version=${HBASE_VERSION} -Dhadoop.profile=23 install site assembly:assembly "$@"
+tar --strip-components=2 -xzf target/hbase*.tar.gz
Added: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/hbase.nofiles.conf
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/hbase.nofiles.conf?rev=1196308&view=auto
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/hbase.nofiles.conf (added)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/hbase.nofiles.conf Tue Nov 1 21:53:04 2011
@@ -0,0 +1 @@
+hbase - nofile 32768
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/install_hbase.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/install_hbase.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/install_hbase.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hbase/install_hbase.sh Tue Nov 1 21:53:04 2011
@@ -120,12 +120,22 @@ wrapper=$PREFIX/usr/bin/hbase
mkdir -p `dirname $wrapper`
cat > $wrapper <<EOF
#!/bin/sh
+
+. /etc/default/hadoop
+. /etc/default/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
export ZOOKEEPER_CONF=\${ZOOKEEPER_CONF:-/etc/zookeeper}
export HADOOP_CONF=\${HADOOP_CONF:-/etc/hadoop/conf}
export ZOOKEEPER_HOME=\${ZOOKEEPER_HOME:-/usr/lib/zookeeper}
-export HADOOP_HOME=\${HADOOP_HOME:-/usr/lib/hadoop}
export HBASE_CLASSPATH=\$ZOOKEEPER_CONF:\$HADOOP_CONF:\$HADOOP_HOME/*:\$HADOOP_HOME/lib/*:\$ZOOKEEPER_HOME/*:\$ZOOKEEPER_HOME/lib/*:\$HBASE_CLASSPATH
-export HBASE_PID_DIR=/var/run/hbase
+
exec /usr/lib/hbase/bin/hbase "\$@"
EOF
chmod 755 $wrapper
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/do-component-build Tue Nov 1 21:53:04 2011
@@ -16,4 +16,4 @@
set -ex
-ant -f src/build.xml package "$@"
+ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh Tue Nov 1 21:53:04 2011
@@ -30,6 +30,14 @@
# Short-Description: hive
### END INIT INFO
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
# Modelled after $HADOOP_HOME/bin/hadoop-daemon.sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh.suse
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh.suse?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh.suse (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/hadoop-hive.sh.suse Tue Nov 1 21:53:04 2011
@@ -32,7 +32,14 @@
# Short-Description: Hive @HIVE_DAEMON@
### END INIT INFO
-source /lib/lsb/init-functions
+. /lib/lsb/init-functions
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
# Modelled after $HADOOP_HOME/bin/hadoop-daemon.sh
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/install_hive.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/install_hive.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/install_hive.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hive/install_hive.sh Tue Nov 1 21:53:04 2011
@@ -119,8 +119,15 @@ do
wrapper=$BIN_DIR/$file
cat >>$wrapper <<EOF
#!/bin/sh
+. /etc/default/hadoop
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
-export HADOOP_HOME=\${HADOOP_HOME:-/usr/lib/hadoop}
export HIVE_HOME=$INSTALLED_LIB_DIR
exec $INSTALLED_LIB_DIR/bin/$file "\$@"
EOF
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/mahout/install_mahout.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/mahout/install_mahout.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/mahout/install_mahout.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/mahout/install_mahout.sh Tue Nov 1 21:53:04 2011
@@ -116,14 +116,24 @@ install -d -m 0755 $PREFIX/$CONF_DIR
cp -a ${BUILD_DIR}/conf/* $PREFIX/$CONF_DIR
ln -s /etc/mahout/conf $PREFIX/$LIB_DIR/conf
+# Copy in the example files
+cp -a ${BUILD_DIR}/examples/ $PREFIX/$DOC_DIR/
+
# Copy in the /usr/bin/mahout wrapper
install -d -m 0755 $PREFIX/$BIN_DIR
cat > $PREFIX/$BIN_DIR/mahout <<EOF
#!/bin/sh
+. /etc/default/hadoop
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
export MAHOUT_HOME=\${MAHOUT_HOME:-$INSTALLED_LIB_DIR}
export MAHOUT_CONF_DIR=\${MAHOUT_CONF_DIR:-$CONF_DIR}
-export HADOOP_CONF_DIR=\${HADOOP_CONF_DIR:-/etc/hadoop/conf}
-export HADOOP_HOME=\${HADOOP_HOME:-/usr/lib/hadoop}
exec $INSTALLED_LIB_DIR/bin/mahout "\$@"
EOF
chmod 755 $PREFIX/$BIN_DIR/mahout
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/create-package-layout
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/create-package-layout?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/create-package-layout (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/create-package-layout Tue Nov 1 21:53:04 2011
@@ -26,8 +26,8 @@ usage() {
echo "
usage: $0 <options>
Required not-so-options:
- --extra-dir=DIR path to cloudera distribution files
- --build-dir=DIR path to cloudera distribution files
+ --extra-dir=DIR path to Bigtop distribution files
+ --build-dir=DIR path to Bigtop distribution files
--server-dir=DIR path to server package root
--client-dir=DIR path to the client package root
--initd-dir=DIR path to the server init.d directory
@@ -156,24 +156,27 @@ failIfNotOK
cat > ${BIN_DIR}/oozie <<EOF
#!/bin/sh
#
-# Licensed to Cloudera, Inc. under one or more contributor license
-# agreements. See the NOTICE file distributed with this work for
-# additional information regarding copyright ownership. Cloudera,
-# Inc. licenses this file to you under the Apache License, Version
-# 2.0 (the "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# Copyright (c) 2010-2011 Cloudera, inc.
+# http://www.apache.org/licenses/LICENSE-2.0
#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
exec /usr/lib/oozie/bin/oozie "\$@"
EOF
@@ -206,6 +209,8 @@ if [ "${OOZIE_INITD}" != "" ]; then
failIfNotOK
cp -R ${EXTRADIR}/oozie.init ${OOZIE_INITD}/oozie
failIfNotOK
+ chmod 755 ${OOZIE_INITD}/oozie
+ failIfNotOK
fi
cp -R ${OOZIE_BUILD_DIR}/oozie-sharelib*.tar.gz ${OOZIE_SERVER_DIR}/oozie-sharelib.tar.gz
failIfNotOK
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/do-component-build?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/do-component-build Tue Nov 1 21:53:04 2011
@@ -84,7 +84,7 @@ if [ "${DO_MAVEN_DEPLOY}" = "deploy" ];
fi
# Invoke Oozie build script with CDH options
-${WORKDIR}/bin/mkdistro.sh -DincludeHadoopJars -DskipTests -Dnot.cdh.release.build=false ${EXTRA_GOALS} "$@"
+${WORKDIR}/bin/mkdistro.sh -DskipTests -Dnot.cdh.release.build=false ${EXTRA_GOALS} "$@"
MKDISTRO_RESULT=$?
if [ "${MKDISTRO_RESULT}" != "0" ]; then
echo "ERROR: mkdistro.sh failed with error ${MKDISTRO_RESULT}"
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/oozie.init
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/oozie.init?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/oozie.init (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/oozie.init Tue Nov 1 21:53:04 2011
@@ -31,6 +31,13 @@
# Short-Description: Oozie server daemon
### END INIT INFO
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
is_oozie_alive() {
if [ ! -f "$OOZIE_PID" ]; then
#not running
@@ -54,7 +61,7 @@ start_oozie() {
stop_oozie() {
is_oozie_alive
if [ "${STATUS}" = "0" ]; then
- su --shell=/bin/sh -l oozie -c /usr/lib/oozie/bin/oozie-stop.sh
+ su --shell=/bin/sh -l oozie -c '/usr/lib/oozie/bin/oozied.sh stop -force'
if [ $? -eq 0 ]; then
is_oozie_alive
if [ "${STATUS}" = "1" ]; then
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/patch
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/patch?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/patch (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/oozie/patch Tue Nov 1 21:53:04 2011
@@ -104,7 +104,11 @@ Index: oozie-2.3.2/pom.xml
- <version>0.20.104.2</version>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
@@ -115,7 +119,11 @@ Index: oozie-2.3.2/pom.xml
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
@@ -126,7 +134,11 @@ Index: oozie-2.3.2/pom.xml
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapred</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
@@ -143,7 +155,11 @@ Index: oozie-2.3.2/pom.xml
- <version>0.20.104.2</version>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common-test</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
@@ -157,7 +173,11 @@ Index: oozie-2.3.2/pom.xml
- <groupId>com.yahoo.hadoop</groupId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs-test</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
@@ -181,7 +201,11 @@ Index: oozie-2.3.2/pom.xml
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapred-test</artifactId>
+ <version>0.23.0-SNAPSHOT</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.commons</groupId>
@@ -206,7 +230,11 @@ Index: oozie-2.3.2/pom.xml
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-streaming</artifactId>
- <version>0.20.104.2</version>
+ <version>0.23-SNAPSHOT</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/do-component-build Tue Nov 1 21:53:04 2011
@@ -16,6 +16,6 @@
set -ex
-ant -Djavac.version=1.6 -Djava5.home=${JAVA5_HOME} -Dforrest.home=${FORREST_HOME} -Ddist.dir=debian/tmp -Dversion=${PIG_BASE_VERSION} package "$@"
+ant -Djavac.version=1.6 -Djava5.home=${JAVA5_HOME} -Dforrest.home=${FORREST_HOME} -Dversion=${PIG_BASE_VERSION} package "$@"
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/install_pig.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/install_pig.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/install_pig.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/pig/install_pig.sh Tue Nov 1 21:53:04 2011
@@ -120,6 +120,14 @@ ln -s /etc/pig/conf $LIB_DIR/conf
install -d -m 0755 $BIN_DIR
cat > $BIN_DIR/pig <<EOF
#!/bin/sh
+. /etc/default/hadoop
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
exec $INSTALLED_LIB_DIR/bin/pig "\$@"
EOF
@@ -133,6 +141,9 @@ install -d -m 0755 $DOC_DIR
(cd $BUILD_DIR/docs && tar -cf - .)|(cd $DOC_DIR && tar -xf -)
install -d -m 0755 $EXAMPLES_DIR
+(cd $LIB_DIR ; mv pig*withouthadoop.jar `echo pig*withouthadoop.jar | sed -e 's#withouthadoop#core#'`)
+# FIXME: workaround for BIGTOP-161
+(cd $LIB_DIR ; ln -s pig-*-core.jar pig-withouthadoop.jar)
PIG_JAR=$(basename $(ls $LIB_DIR/pig*core.jar))
sed -i -e "s|../pig.jar|/usr/lib/pig/$PIG_JAR|" $BUILD_DIR/tutorial/build.xml
(cd $BUILD_DIR/tutorial && tar -cf - .)|(cd $EXAMPLES_DIR && tar -xf -)
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/install_sqoop.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/install_sqoop.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/install_sqoop.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/install_sqoop.sh Tue Nov 1 21:53:04 2011
@@ -135,6 +135,15 @@ for i in sqoop sqoop-codegen sqoop-expor
mkdir -p `dirname $wrapper`
cat > $wrapper <<EOF
#!/bin/sh
+. /etc/default/hadoop
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
export SQOOP_HOME=$LIB_DIR
exec $BIN_DIR/$i "\$@"
EOF
@@ -143,7 +152,6 @@ done
install -d -m 0755 $PREFIX/$ETC_DIR/conf
(cd ${BUILD_DIR}/conf && tar cf - .) | (cd $PREFIX/$ETC_DIR/conf && tar xf -)
-rm $PREFIX/$ETC_DIR/conf/.gitignore
unlink $PREFIX/$LIB_DIR/conf || /bin/true
ln -s /etc/sqoop/conf $PREFIX/$LIB_DIR/conf
Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/sqoop-metastore.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/sqoop-metastore.sh?rev=1196308&r1=1196307&r2=1196308&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/sqoop-metastore.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/sqoop/sqoop-metastore.sh Tue Nov 1 21:53:04 2011
@@ -34,6 +34,13 @@
### END INIT INFO
set -e
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
NAME=sqoop-metastore