Posted to commits@ambari.apache.org by dd...@apache.org on 2011/12/17 02:29:26 UTC

svn commit: r1215378 - /incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd

Author: ddas
Date: Sat Dec 17 01:29:26 2011
New Revision: 1215378

URL: http://svn.apache.org/viewvc?rev=1215378&view=rev
Log:
AMBARI-165. ACD fix: drop the HDFS configure and install scripts and correct the start, check, and prestart scripts.

Modified:
    incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd

Modified: incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd?rev=1215378&r1=1215377&r2=1215378&view=diff
==============================================================================
--- incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd (original)
+++ incubator/ambari/trunk/controller/src/main/resources/org/apache/ambari/acd/hdfs-0.1.0.acd Sat Dec 17 01:29:26 2011
@@ -4,44 +4,42 @@
   <roles name="namenode"/>
   <roles name="secondarynamenode"/>
   <roles name="datanode"/>
-  <configure>
-<![CDATA[
-import ambari_component
-import sys
-
-[pgm, cluster, role] = sys.argv
-
-opts = {}
-if role == "datanode":
-  opts['dfs.data.dir'] = ambari_component.getDataDirs()
-
-ambari_component.copyXml('hadoop/hdfs-site', opts)
-]]>
-  </configure>
-  <install>
-<![CDATA[
-print "TO BE DONE"
-]]>
-  </install>
   <start>
 <![CDATA[
 import os
 import sys
 
 [pgm, cluster, role] = sys.argv
-
-os.execl("stack/bin/hadoop", "stack/bin/hadoop", role)
+# Point hadoop at the configuration bundled with the stack.
+os.environ['HADOOP_CONF_DIR'] = os.getcwd() + "/etc/hadoop"
+# Replace this process with the hadoop daemon for the assigned role.
+os.execlp("hadoop", "hadoop", role)
 ]]>
   </start>
-  <check runOn="namenode" user="${hdfs.user}">
+  <check runOn="namenode">
 <![CDATA[
 import os
+import sys
+
+os.environ['HADOOP_CONF_DIR'] = os.getcwd() + "/etc/hadoop"
+# os.system() returns a raw wait status, not an exit code, so unpack
+# it before handing it to sys.exit(); the check succeeds only when the
+# namenode reports that safe mode is off.
+status = os.system('hadoop dfsadmin -safemode get | grep "Safe mode is OFF"')
+sys.exit(os.WEXITSTATUS(status))
 
-os.exec('stack/bin/hadoop', 'stack/bin/hadoop', 'dfsadmin', '-get')
 ]]>
   </check>
 
-  <prestart runOn="namenode" user="{hdfs.user}">
-<![CDATA[]]>
+  <prestart runOn="namenode">
+<![CDATA[
+import subprocess
+
+# Format the namenode, answering "N" at the confirmation prompt so an
+# already-formatted filesystem is left intact; communicate() also
+# waits for the command to exit.
+proc = subprocess.Popen(['hadoop', 'namenode', '-format'], stdin=subprocess.PIPE)
+proc.communicate('N\n')
+]]>
   </prestart>
 </component>
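
For reference, the safemode probe in the new check script can be exercised outside the ACD harness. Below is a minimal standalone sketch, assuming Python 2.7+ with "hadoop" on the PATH and ./etc/hadoop holding a valid client configuration (both assumptions mirror what the script sets up); it uses subprocess instead of os.system, which avoids the wait-status unpacking altogether:

import os
import subprocess
import sys

# Mirrors the environment the check script sets up; the PATH lookup
# of "hadoop" and the ./etc/hadoop location are assumptions here.
os.environ['HADOOP_CONF_DIR'] = os.getcwd() + "/etc/hadoop"

try:
    # check_output raises CalledProcessError on a nonzero exit status.
    out = subprocess.check_output(['hadoop', 'dfsadmin', '-safemode', 'get'])
except subprocess.CalledProcessError:
    sys.exit(1)

# Succeed only when the namenode reports that safe mode is off.
sys.exit(0 if 'Safe mode is OFF' in out else 1)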