You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by sm...@apache.org on 2013/04/18 19:21:25 UTC

svn commit: r1469467 - in /incubator/ambari/trunk: ./ ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/ ambari-server/ ambari-server/src/main/java/org/apache/ambari/eventdb/db/ ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/ ...

Author: smohanty
Date: Thu Apr 18 17:21:25 2013
New Revision: 1469467

URL: http://svn.apache.org/r1469467
Log:
AMBARI-1973. log4j Appender for RCA should be able to write to the same database being used for Ambari Server (oracle/MySql). (smohanty)

Added:
    incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/MySQLConnector.java
Modified:
    incubator/ambari/trunk/CHANGES.txt
    incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/hadoop-env.sh.erb
    incubator/ambari/trunk/ambari-server/pom.xml
    incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
    incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/WorkflowJsonService.java
    incubator/ambari/trunk/ambari-server/src/main/python/ambari-server.py
    incubator/ambari/trunk/ambari-server/src/main/resources/mysql-ddl.sql
    incubator/ambari/trunk/ambari-server/src/test/python/TestAmbaryServer.py

Modified: incubator/ambari/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/CHANGES.txt?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/CHANGES.txt (original)
+++ incubator/ambari/trunk/CHANGES.txt Thu Apr 18 17:21:25 2013
@@ -760,6 +760,9 @@ Trunk (unreleased changes):
 
  BUG FIXES
 
+ AMBARI-1973. log4j Appender for RCA should be able to write the same database
+ being used for Ambari Server (oracle/MySql). (smohanty)
+
  AMBARI-1972. Stacks2 api implemenation using the standard framework is not
  complete - does not show configuration tags. (smohanty)
 

Modified: incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/hadoop-env.sh.erb
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/hadoop-env.sh.erb?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/hadoop-env.sh.erb (original)
+++ incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/hadoop-env.sh.erb Thu Apr 18 17:21:25 2013
@@ -87,3 +87,6 @@ export HADOOP_IDENT_STRING=$USER
 # The scheduling priority for daemon processes.  See 'man nice'.
 
 # export HADOOP_NICENESS=10
+
+# Use libraries from standard classpath
+export HADOOP_CLASSPATH=/usr/share/java/*

Modified: incubator/ambari/trunk/ambari-server/pom.xml
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/pom.xml?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/pom.xml (original)
+++ incubator/ambari/trunk/ambari-server/pom.xml Thu Apr 18 17:21:25 2013
@@ -400,7 +400,7 @@
               </environmentVariables>
               <skip>${skipTests}</skip>
             </configuration>
-            <id>python-test</id>
+            <id>default-cli</id>
             <phase>test</phase>
             <goals>
               <goal>exec</goal>

Added: incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/MySQLConnector.java
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/MySQLConnector.java?rev=1469467&view=auto
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/MySQLConnector.java (added)
+++ incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/MySQLConnector.java Thu Apr 18 17:21:25 2013
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.eventdb.db;
+
+import org.apache.ambari.eventdb.model.Workflows;
+
+import java.io.IOException;
+import java.sql.PreparedStatement;
+
+public class MySQLConnector extends PostgresConnector {
+  public MySQLConnector(String connectionURL, String driverName, String username, String password) throws IOException {
+    super(connectionURL, driverName, username, password);
+  }
+
+  @Override
+  protected PreparedStatement getQualifiedPS(Statements statement, String searchClause, Workflows.WorkflowDBEntry.WorkflowFields field, boolean sortAscending, int offset, int limit) throws IOException {
+    if (db == null)
+      throw new IOException("postgres db not initialized");
+    String limitClause = " ORDER BY " + field.toString() + " " + (sortAscending ? SORT_ASC : SORT_DESC) + " LIMIT " + (limit >= 0 ? limit : DEFAULT_LIMIT) + " OFFSET " + offset;
+    return getQualifiedPS(statement, searchClause + limitClause);
+  }
+}

Modified: incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java (original)
+++ incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/db/PostgresConnector.java Thu Apr 18 17:21:25 2013
@@ -54,6 +54,7 @@ public class PostgresConnector implement
   private static final String TASK_ATTEMPT_TABLE_NAME = "taskattempt";
   public static final String SORT_ASC = "ASC";
   public static final String SORT_DESC = "DESC";
+  protected static final int DEFAULT_LIMIT = 10;
   
   private static final ObjectMapper jsonMapper = new ObjectMapper();
   
@@ -482,7 +483,7 @@ public class PostgresConnector implement
     return preparedStatements.get(statement);
   }
   
-  private PreparedStatement getQualifiedPS(Statements statement, String searchClause) throws IOException {
+  protected PreparedStatement getQualifiedPS(Statements statement, String searchClause) throws IOException {
     if (db == null)
       throw new IOException("postgres db not initialized");
     try {

Modified: incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/WorkflowJsonService.java
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/WorkflowJsonService.java?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/WorkflowJsonService.java (original)
+++ incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/eventdb/webservice/WorkflowJsonService.java Thu Apr 18 17:21:25 2013
@@ -33,6 +33,7 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 
+import org.apache.ambari.eventdb.db.MySQLConnector;
 import org.apache.ambari.eventdb.db.OracleConnector;
 import org.apache.ambari.eventdb.db.PostgresConnector;
 import org.apache.ambari.eventdb.model.DataTable;
@@ -48,6 +49,8 @@ import org.apache.ambari.eventdb.model.W
 import org.apache.ambari.eventdb.model.Workflows.WorkflowDBEntry.WorkflowFields;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Path("/jobhistory")
 public class WorkflowJsonService {
@@ -57,8 +60,8 @@ public class WorkflowJsonService {
   private static final String USERNAME = PREFIX + "db.user";
   private static final String PASSWORD = PREFIX + "db.password";
   
-  private static String DEFAULT_DRIVER = "localhost";
-  private static String DEFAULT_URL = "ambarirca";
+  private static String DEFAULT_DRIVER;
+  private static String DEFAULT_URL;
   private static String DEFAULT_USERNAME = "mapred";
   private static String DEFAULT_PASSWORD = "mapred";
   
@@ -68,13 +71,17 @@ public class WorkflowJsonService {
     List<WorkflowDBEntry> emptyWorkflows = Collections.emptyList();
     EMPTY_WORKFLOWS.setWorkflows(emptyWorkflows);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(WorkflowJsonService.class);
   
   PostgresConnector getConnector() throws IOException {
     //TODO fix temp hack
     if (StringUtils.contains(DEFAULT_DRIVER, "oracle")) {
-      return new OracleConnector(DEFAULT_DRIVER, DEFAULT_URL, DEFAULT_USERNAME, DEFAULT_PASSWORD);
+      return new OracleConnector(DEFAULT_URL, DEFAULT_DRIVER, DEFAULT_USERNAME, DEFAULT_PASSWORD);
+    }else if (StringUtils.contains(DEFAULT_DRIVER, "mysql")) {
+      return new MySQLConnector(DEFAULT_URL, DEFAULT_DRIVER, DEFAULT_USERNAME, DEFAULT_PASSWORD);
     } else {
-      return new PostgresConnector(DEFAULT_DRIVER, DEFAULT_URL, DEFAULT_USERNAME, DEFAULT_PASSWORD);
+      return new PostgresConnector(DEFAULT_URL, DEFAULT_DRIVER, DEFAULT_USERNAME, DEFAULT_PASSWORD);
     }
   }
 
@@ -109,7 +116,7 @@ public class WorkflowJsonService {
         workflows = conn.fetchWorkflows(WorkflowFields.valueOf(field), sortDir.toUpperCase().equals(PostgresConnector.SORT_ASC), offset, limit);
       }
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
       workflows = EMPTY_WORKFLOWS;
     } finally {
       if (conn != null) {
@@ -185,7 +192,7 @@ public class WorkflowJsonService {
       table = conn.fetchWorkflows(start, amount, searchTerm, echo, field, sortAscending, workflowId, workflowName, workflowType, userName, minJobs, maxJobs,
           minInputBytes, maxInputBytes, minOutputBytes, maxOutputBytes, minDuration, maxDuration, minStartTime, maxStartTime, minFinishTime, maxFinishTime);
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
     } finally {
       if (conn != null) {
         conn.close();
@@ -208,7 +215,7 @@ public class WorkflowJsonService {
       else if (maxStartTime >= minFinishTime)
         jobs.setJobs(conn.fetchJobDetails(minFinishTime, maxStartTime));
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
       jobs.setJobs(EMPTY_JOBS);
     } finally {
       if (conn != null) {
@@ -255,7 +262,7 @@ public class WorkflowJsonService {
           getTaskDetails(taskAttempts, points, submitTimeSecs, finishTimeSecs, step);
       }
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
     } finally {
       if (conn != null) {
         conn.close();
@@ -278,7 +285,7 @@ public class WorkflowJsonService {
         taskAttempts = conn.fetchWorkflowTaskAttempts(workflowId);
       }
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
     } finally {
       if (conn != null) {
         conn.close();
@@ -307,7 +314,7 @@ public class WorkflowJsonService {
         getExactTaskAttemptsByLocality(conn.fetchWorkflowTaskAttempts(workflowId), data, minr, maxr);
       }
     } catch (IOException e) {
-      e.printStackTrace();
+      LOG.error("Error interacting with RCA database ", e);
     } finally {
       if (conn != null) {
         conn.close();

Modified: incubator/ambari/trunk/ambari-server/src/main/python/ambari-server.py
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/python/ambari-server.py?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/main/python/ambari-server.py (original)
+++ incubator/ambari/trunk/ambari-server/src/main/python/ambari-server.py Thu Apr 18 17:21:25 2013
@@ -70,6 +70,7 @@ ambari_provider_module = os.environ.get(
 
 # constants
 STACK_NAME_VER_SEP = "-"
+JAVA_SHARE_PATH="/usr/share/java"
 
 if ambari_provider_module is not None:
   ambari_provider_module_option = "-Dprovider.module.class=" +\
@@ -82,17 +83,15 @@ SERVER_START_CMD="{0}" + os.sep + "bin" 
                  ambari_provider_module_option +\
                  os.getenv('AMBARI_JVM_ARGS','-Xms512m -Xmx2048m') +\
                  " -cp {1}"+ os.pathsep + "{2}" +\
-                 "/* org.apache.ambari.server.controller.AmbariServer "\
+                 " org.apache.ambari.server.controller.AmbariServer "\
                  ">/var/log/ambari-server/ambari-server.out 2>&1"
 SERVER_START_CMD_DEBUG="{0}" + os.sep + "bin" + os.sep +\
                        "java -server -XX:NewRatio=2 -XX:+UseConcMarkSweepGC " +\
                        ambari_provider_module_option +\
                        os.getenv('AMBARI_JVM_ARGS','-Xms512m -Xmx2048m') +\
                        " -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,"\
-                       "server=y,suspend=n -cp {1}"+ os.pathsep + ".." +\
-                       os.sep + "lib" + os.sep + "ambari-server" +\
-                       os.sep +\
-                       "* org.apache.ambari.server.controller.AmbariServer"
+                       "server=y,suspend=n -cp {1}"+ os.pathsep + "{2}" +\
+                       " org.apache.ambari.server.controller.AmbariServer"
 
 AMBARI_CONF_VAR="AMBARI_CONF_DIR"
 AMBARI_SERVER_LIB="AMBARI_SERVER_LIB"
@@ -453,6 +452,14 @@ def get_ambari_jars():
     return default_jar_location
 
 
+def get_share_jars():
+  return JAVA_SHARE_PATH
+
+def get_ambari_classpath():
+  ambari_cp = get_ambari_jars()+os.sep+"*"
+  share_cp = get_share_jars()+os.sep+"*"
+  return ambari_cp+os.pathsep+share_cp
+
 
 def get_conf_dir():
   try:
@@ -975,7 +982,7 @@ def start(args):
     print_error_msg ("Failed to stop iptables. Exiting")
     sys.exit(retcode)
 
-  command = SERVER_START_CMD.format(jdk_path, conf_dir, get_ambari_jars())
+  command = SERVER_START_CMD.format(jdk_path, conf_dir, get_ambari_classpath())
   print "Running server: " + command
   server_process = subprocess.Popen(["/bin/sh", "-c", command])
   f = open(PID_DIR + os.sep + PID_NAME, "w")

Modified: incubator/ambari/trunk/ambari-server/src/main/resources/mysql-ddl.sql
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/resources/mysql-ddl.sql?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/main/resources/mysql-ddl.sql (original)
+++ incubator/ambari/trunk/ambari-server/src/main/resources/mysql-ddl.sql Thu Apr 18 17:21:25 2013
@@ -1,13 +1,11 @@
 DROP DATABASE IF EXISTS `ambari`;
-DROP DATABASE IF EXISTS `ambarirca`;
-DROP USER `ambari-server`;
--- DROP USER `mapred`;
+DROP USER `ambari`;
 
 delimiter ;
 
 CREATE DATABASE `ambari` /*!40100 DEFAULT CHARACTER SET utf8 */;
 
-CREATE USER 'ambari-server' IDENTIFIED BY 'bigdata';
+CREATE USER 'ambari' IDENTIFIED BY 'bigdata';
 
 USE ambari;
 
@@ -26,7 +24,7 @@ CREATE TABLE servicecomponentdesiredstat
 CREATE TABLE serviceconfigmapping (config_type VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, config_tag VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, timestamp BIGINT NOT NULL, PRIMARY KEY (config_type, cluster_id, service_name));
 CREATE TABLE servicedesiredstate (cluster_id BIGINT NOT NULL, desired_host_role_mapping INTEGER NOT NULL, desired_stack_version VARCHAR(255) NOT NULL, desired_state VARCHAR(255) NOT NULL, service_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, service_name));
 CREATE TABLE roles (role_name VARCHAR(255) NOT NULL, PRIMARY KEY (role_name));
-CREATE TABLE users (user_id INTEGER NOT NULL, create_time DATETIME, ldap_user INTEGER, user_name VARCHAR(255), user_password VARCHAR(255), PRIMARY KEY (user_id));
+CREATE TABLE users (user_id INTEGER NOT NULL, create_time DATETIME DEFAULT NOW(), ldap_user INTEGER NOT NULL DEFAULT 0, user_name VARCHAR(255), user_password VARCHAR(255), PRIMARY KEY (user_id));
 CREATE TABLE execution_command (task_id BIGINT NOT NULL, command LONGBLOB, PRIMARY KEY (task_id));
 CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event LONGTEXT NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), role_command VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, status VARCHAR(255), std_error LONGBLOB, std_out LONGBLOB, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor DOUBLE NOT NULL, PRIMARY KEY (role, request_id, stage_id));
@@ -76,24 +74,24 @@ INSERT INTO ambari_sequences(sequence_na
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);
 
 insert into ambari.Roles(role_name)
-select 'admin'
-union all
-select 'user';
+  select 'admin'
+  union all
+  select 'user';
 
 insert into ambari.Users(user_id, user_name, user_password)
-select 1,'admin','538916f8943ec225d97a9a86a2c6ec0818c1cd400e09e03b660fdaaec4af29ddbb6f2b1033b81b00';
+  select 1,'admin','538916f8943ec225d97a9a86a2c6ec0818c1cd400e09e03b660fdaaec4af29ddbb6f2b1033b81b00';
 
 insert into ambari.user_roles(role_name, user_id)
-select 'admin',1;
+  select 'admin',1;
 
 insert into ambari.metainfo(`metainfo_key`, `metainfo_value`)
-select 'version','1.3.0';
+  select 'version','1.3.0';
 
 
 
 CREATE TABLE workflow (
   workflowId VARCHAR(255), workflowName TEXT,
-  parentWorkflowId VARCHAR(255),  
+  parentWorkflowId VARCHAR(255),
   workflowContext TEXT, userName TEXT,
   startTime BIGINT, lastUpdateTime BIGINT,
   numJobsTotal INTEGER, numJobsCompleted INTEGER,
@@ -105,37 +103,37 @@ CREATE TABLE workflow (
 
 CREATE TABLE job (
   jobId VARCHAR(255), workflowId VARCHAR(255), jobName TEXT, workflowEntityName TEXT,
-  userName TEXT, queue TEXT, acls TEXT, confPath TEXT, 
-  submitTime BIGINT, launchTime BIGINT, finishTime BIGINT, 
-  maps INTEGER, reduces INTEGER, status TEXT, priority TEXT, 
-  finishedMaps INTEGER, finishedReduces INTEGER, 
-  failedMaps INTEGER, failedReduces INTEGER, 
+  userName TEXT, queue TEXT, acls TEXT, confPath TEXT,
+  submitTime BIGINT, launchTime BIGINT, finishTime BIGINT,
+  maps INTEGER, reduces INTEGER, status TEXT, priority TEXT,
+  finishedMaps INTEGER, finishedReduces INTEGER,
+  failedMaps INTEGER, failedReduces INTEGER,
   mapsRuntime BIGINT, reducesRuntime BIGINT,
-  mapCounters TEXT, reduceCounters TEXT, jobCounters TEXT, 
+  mapCounters TEXT, reduceCounters TEXT, jobCounters TEXT,
   inputBytes BIGINT, outputBytes BIGINT,
   PRIMARY KEY(jobId),
   FOREIGN KEY(workflowId) REFERENCES workflow(workflowId)
 );
 
 CREATE TABLE task (
-  taskId VARCHAR(255), jobId VARCHAR(255), taskType TEXT, splits TEXT, 
-  startTime BIGINT, finishTime BIGINT, status TEXT, error TEXT, counters TEXT, 
-  failedAttempt TEXT, 
-  PRIMARY KEY(taskId), 
+  taskId VARCHAR(255), jobId VARCHAR(255), taskType TEXT, splits TEXT,
+  startTime BIGINT, finishTime BIGINT, status TEXT, error TEXT, counters TEXT,
+  failedAttempt TEXT,
+  PRIMARY KEY(taskId),
   FOREIGN KEY(jobId) REFERENCES job(jobId)
 );
 
 CREATE TABLE taskAttempt (
-  taskAttemptId VARCHAR(255), taskId VARCHAR(255), jobId VARCHAR(255), taskType TEXT, taskTracker TEXT, 
-  startTime BIGINT, finishTime BIGINT, 
-  mapFinishTime BIGINT, shuffleFinishTime BIGINT, sortFinishTime BIGINT, 
-  locality TEXT, avataar TEXT, 
-  status TEXT, error TEXT, counters TEXT, 
+  taskAttemptId VARCHAR(255), taskId VARCHAR(255), jobId VARCHAR(255), taskType TEXT, taskTracker TEXT,
+  startTime BIGINT, finishTime BIGINT,
+  mapFinishTime BIGINT, shuffleFinishTime BIGINT, sortFinishTime BIGINT,
+  locality TEXT, avataar TEXT,
+  status TEXT, error TEXT, counters TEXT,
   inputBytes BIGINT, outputBytes BIGINT,
-  PRIMARY KEY(taskAttemptId), 
-  FOREIGN KEY(jobId) REFERENCES job(jobId), 
+  PRIMARY KEY(taskAttemptId),
+  FOREIGN KEY(jobId) REFERENCES job(jobId),
   FOREIGN KEY(taskId) REFERENCES task(taskId)
-); 
+);
 
 CREATE TABLE hdfsEvent (
   timestamp BIGINT,
@@ -159,9 +157,9 @@ CREATE TABLE mapreduceEvent (
 );
 
 CREATE TABLE clusterEvent (
-  timestamp BIGINT, 
-  service TEXT, status TEXT, 
-  error TEXT, data TEXT , 
+  timestamp BIGINT,
+  service TEXT, status TEXT,
+  error TEXT, data TEXT ,
   host TEXT, rack TEXT
 );
 

Modified: incubator/ambari/trunk/ambari-server/src/test/python/TestAmbaryServer.py
URL: http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/test/python/TestAmbaryServer.py?rev=1469467&r1=1469466&r2=1469467&view=diff
==============================================================================
--- incubator/ambari/trunk/ambari-server/src/test/python/TestAmbaryServer.py (original)
+++ incubator/ambari/trunk/ambari-server/src/test/python/TestAmbaryServer.py Thu Apr 18 17:21:25 2013
@@ -619,6 +619,20 @@ class TestAmbariServer(TestCase):
 
 
   @patch.object(ambari_server, "print_info_msg")
+  def test_get_share_jars(self, printInfoMsg_mock):
+    expected = "/usr/share/java"
+    result = ambari_server.get_share_jars()
+    self.assertEqual(expected, result)
+
+  @patch.object(ambari_server, "print_info_msg")
+  def test_get_ambari_classpath(self, printInfoMsg_mock):
+    result = ambari_server.get_ambari_classpath()
+    print result
+    self.assertTrue(ambari_server.get_ambari_jars() in result)
+    self.assertTrue(ambari_server.get_share_jars() in result)
+
+
+  @patch.object(ambari_server, "print_info_msg")
   def test_get_conf_dir(self, printInfoMsg_mock):
 
     env = "/ambari/conf"