Posted to hcatalog-commits@incubator.apache.org by tr...@apache.org on 2012/09/10 23:29:03 UTC

svn commit: r1383152 [25/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...

Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java Mon Sep 10 23:28:55 2012
@@ -53,12 +53,12 @@ import org.apache.thrift.TException;
 public class HCatClientHMSImpl extends HCatClient {
 
     private HiveMetaStoreClient hmsClient;
-    private Configuration  config;
+    private Configuration config;
     private HiveConf hiveConfig;
 
     @Override
     public List<String> listDatabaseNamesByPattern(String pattern)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         List<String> dbNames = null;
         try {
             dbNames = hmsClient.getDatabases(pattern);
@@ -70,7 +70,7 @@ public class HCatClientHMSImpl extends H
 
     @Override
     public HCatDatabase getDatabase(String dbName) throws HCatException,
-            ConnectionFailureException {
+        ConnectionFailureException {
         HCatDatabase db = null;
         try {
             Database hiveDB = hmsClient.getDatabase(checkDB(dbName));
@@ -79,42 +79,42 @@ public class HCatClientHMSImpl extends H
             }
         } catch (NoSuchObjectException exp) {
             throw new HCatException(
-                    "NoSuchObjectException while fetching database", exp);
+                "NoSuchObjectException while fetching database", exp);
         } catch (MetaException exp) {
             throw new HCatException("MetaException while fetching database",
-                    exp);
+                exp);
         } catch (TException exp) {
             throw new ConnectionFailureException(
-                    "TException while fetching database", exp);
+                "TException while fetching database", exp);
         }
         return db;
     }
 
     @Override
     public void createDatabase(HCatCreateDBDesc dbInfo) throws HCatException,
-            ConnectionFailureException {
+        ConnectionFailureException {
         try {
             hmsClient.createDatabase(dbInfo.toHiveDb());
         } catch (AlreadyExistsException exp) {
             if (!dbInfo.getIfNotExists()) {
                 throw new HCatException(
-                        "AlreadyExistsException while creating database", exp);
+                    "AlreadyExistsException while creating database", exp);
             }
         } catch (InvalidObjectException exp) {
             throw new HCatException(
-                    "InvalidObjectException while creating database", exp);
+                "InvalidObjectException while creating database", exp);
         } catch (MetaException exp) {
             throw new HCatException("MetaException while creating database",
-                    exp);
+                exp);
         } catch (TException exp) {
             throw new ConnectionFailureException(
-                    "TException while creating database", exp);
+                "TException while creating database", exp);
         }
     }
 
     @Override
     public void dropDatabase(String dbName, boolean ifExists, DROP_DB_MODE mode)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         boolean isCascade;
         if (mode.toString().equalsIgnoreCase("cascade")) {
             isCascade = true;
@@ -126,35 +126,35 @@ public class HCatClientHMSImpl extends H
         } catch (NoSuchObjectException e) {
             if (!ifExists) {
                 throw new HCatException(
-                        "NoSuchObjectException while dropping db.", e);
+                    "NoSuchObjectException while dropping db.", e);
             }
         } catch (InvalidOperationException e) {
             throw new HCatException(
-                    "InvalidOperationException while dropping db.", e);
+                "InvalidOperationException while dropping db.", e);
         } catch (MetaException e) {
             throw new HCatException("MetaException while dropping db.", e);
         } catch (TException e) {
             throw new ConnectionFailureException("TException while dropping db.",
-                    e);
+                e);
         }
     }
 
     @Override
     public List<String> listTableNamesByPattern(String dbName,
-            String tablePattern) throws HCatException, ConnectionFailureException {
+                                                String tablePattern) throws HCatException, ConnectionFailureException {
         List<String> tableNames = null;
         try {
             tableNames = hmsClient.getTables(checkDB(dbName), tablePattern);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while fetching table names.", e);
+                "MetaException while fetching table names.", e);
         }
         return tableNames;
     }
 
     @Override
     public HCatTable getTable(String dbName, String tableName)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         HCatTable table = null;
         try {
             Table hiveTable = hmsClient.getTable(checkDB(dbName), tableName);
@@ -165,35 +165,35 @@ public class HCatClientHMSImpl extends H
             throw new HCatException("MetaException while fetching table.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while fetching table.", e);
+                "TException while fetching table.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while fetching table.", e);
+                "NoSuchObjectException while fetching table.", e);
         }
         return table;
     }
 
     @Override
     public void createTable(HCatCreateTableDesc createTableDesc)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         try {
             hmsClient.createTable(createTableDesc.toHiveTable(hiveConfig));
         } catch (AlreadyExistsException e) {
             if (createTableDesc.getIfNotExists() == false) {
                 throw new HCatException(
-                        "AlreadyExistsException while creating table.", e);
+                    "AlreadyExistsException while creating table.", e);
             }
         } catch (InvalidObjectException e) {
             throw new HCatException(
-                    "InvalidObjectException while creating table.", e);
+                "InvalidObjectException while creating table.", e);
         } catch (MetaException e) {
             throw new HCatException("MetaException while creating table.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while creating table.", e);
+                "NoSuchObjectException while creating table.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while creating table.", e);
+                "TException while creating table.", e);
         } catch (IOException e) {
             throw new HCatException("IOException while creating hive conf.", e);
         }
@@ -202,69 +202,69 @@ public class HCatClientHMSImpl extends H
 
     @Override
     public void createTableLike(String dbName, String existingTblName,
-            String newTableName, boolean ifNotExists, boolean isExternal,
-            String location) throws HCatException, ConnectionFailureException {
+                                String newTableName, boolean ifNotExists, boolean isExternal,
+                                String location) throws HCatException, ConnectionFailureException {
 
         Table hiveTable = getHiveTableLike(checkDB(dbName), existingTblName,
-                newTableName, ifNotExists, location);
+            newTableName, ifNotExists, location);
         if (hiveTable != null) {
             try {
                 hmsClient.createTable(hiveTable);
             } catch (AlreadyExistsException e) {
                 if (!ifNotExists) {
                     throw new HCatException(
-                            "A table already exists with the name "
-                                    + newTableName, e);
+                        "A table already exists with the name "
+                            + newTableName, e);
                 }
             } catch (InvalidObjectException e) {
                 throw new HCatException(
-                        "InvalidObjectException in create table like command.",
-                        e);
+                    "InvalidObjectException in create table like command.",
+                    e);
             } catch (MetaException e) {
                 throw new HCatException(
-                        "MetaException in create table like command.", e);
+                    "MetaException in create table like command.", e);
             } catch (NoSuchObjectException e) {
                 throw new HCatException(
-                        "NoSuchObjectException in create table like command.",
-                        e);
+                    "NoSuchObjectException in create table like command.",
+                    e);
             } catch (TException e) {
                 throw new ConnectionFailureException(
-                        "TException in create table like command.", e);
+                    "TException in create table like command.", e);
             }
         }
     }
 
     @Override
     public void dropTable(String dbName, String tableName, boolean ifExists)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         try {
-            hmsClient.dropTable(checkDB(dbName), tableName,true, ifExists);
+            hmsClient.dropTable(checkDB(dbName), tableName, true, ifExists);
         } catch (NoSuchObjectException e) {
             if (!ifExists) {
                 throw new HCatException(
-                        "NoSuchObjectException while dropping table.", e);
+                    "NoSuchObjectException while dropping table.", e);
             }
         } catch (MetaException e) {
             throw new HCatException("MetaException while dropping table.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while dropping table.", e);
+                "TException while dropping table.", e);
         }
     }
 
     @Override
     public void renameTable(String dbName, String oldName, String newName)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         Table tbl;
         try {
             Table oldtbl = hmsClient.getTable(checkDB(dbName), oldName);
             if (oldtbl != null) {
                 // TODO : Should be moved out.
                 if (oldtbl
-                        .getParameters()
-                        .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) != null) {
+                    .getParameters()
+                    .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) != null) {
                     throw new HCatException(
-                            "Cannot use rename command on a non-native table");
+                        "Cannot use rename command on a non-native table");
                 }
                 tbl = new Table(oldtbl);
                 tbl.setTableName(newName);
@@ -274,229 +274,229 @@ public class HCatClientHMSImpl extends H
             throw new HCatException("MetaException while renaming table", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while renaming table", e);
+                "TException while renaming table", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while renaming table", e);
+                "NoSuchObjectException while renaming table", e);
         } catch (InvalidOperationException e) {
             throw new HCatException(
-                    "InvalidOperationException while renaming table", e);
+                "InvalidOperationException while renaming table", e);
         }
     }
 
     @Override
     public List<HCatPartition> getPartitions(String dbName, String tblName)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
         try {
             List<Partition> hivePtns = hmsClient.listPartitions(
-                    checkDB(dbName), tblName, (short) -1);
+                checkDB(dbName), tblName, (short) -1);
             for (Partition ptn : hivePtns) {
                 hcatPtns.add(new HCatPartition(ptn));
             }
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while retrieving partition.", e);
+                "NoSuchObjectException while retrieving partition.", e);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while retrieving partition.", e);
+                "MetaException while retrieving partition.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while retrieving partition.", e);
+                "TException while retrieving partition.", e);
         }
         return hcatPtns;
     }
 
     @Override
     public HCatPartition getPartition(String dbName, String tableName,
-            Map<String, String> partitionSpec) throws HCatException,
-            ConnectionFailureException {
+                                      Map<String, String> partitionSpec) throws HCatException,
+        ConnectionFailureException {
         HCatPartition partition = null;
         try {
             ArrayList<String> ptnValues = new ArrayList<String>();
             ptnValues.addAll(partitionSpec.values());
             Partition hivePartition = hmsClient.getPartition(checkDB(dbName),
-                    tableName, ptnValues);
+                tableName, ptnValues);
             if (hivePartition != null) {
                 partition = new HCatPartition(hivePartition);
             }
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while retrieving partition.", e);
+                "MetaException while retrieving partition.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while retrieving partition.", e);
+                "TException while retrieving partition.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while retrieving partition.", e);
+                "NoSuchObjectException while retrieving partition.", e);
         }
         return partition;
     }
 
     @Override
     public void addPartition(HCatAddPartitionDesc partInfo)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         Table tbl = null;
         try {
             tbl = hmsClient.getTable(partInfo.getDatabaseName(),
-                    partInfo.getTableName());
+                partInfo.getTableName());
             // TODO: Should be moved out.
             if (tbl.getPartitionKeysSize() == 0) {
                 throw new HCatException("The table " + partInfo.getTableName()
-                        + " is not partitioned.");
+                    + " is not partitioned.");
             }
 
             hmsClient.add_partition(partInfo.toHivePartition(tbl));
         } catch (InvalidObjectException e) {
             throw new HCatException(
-                    "InvalidObjectException while adding partition.", e);
+                "InvalidObjectException while adding partition.", e);
         } catch (AlreadyExistsException e) {
             throw new HCatException(
-                    "AlreadyExistsException while adding partition.", e);
+                "AlreadyExistsException while adding partition.", e);
         } catch (MetaException e) {
             throw new HCatException("MetaException while adding partition.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while adding partition.", e);
+                "TException while adding partition.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException("The table " + partInfo.getTableName()
-                    + " is could not be found.", e);
+                + " is could not be found.", e);
         }
     }
 
     @Override
     public void dropPartition(String dbName, String tableName,
-            Map<String, String> partitionSpec, boolean ifExists)
-            throws HCatException, ConnectionFailureException {
+                              Map<String, String> partitionSpec, boolean ifExists)
+        throws HCatException, ConnectionFailureException {
         try {
             List<String> ptnValues = new ArrayList<String>();
             ptnValues.addAll(partitionSpec.values());
             hmsClient.dropPartition(checkDB(dbName), tableName, ptnValues,
-                    ifExists);
+                ifExists);
         } catch (NoSuchObjectException e) {
             if (!ifExists) {
                 throw new HCatException(
-                        "NoSuchObjectException while dropping partition.", e);
+                    "NoSuchObjectException while dropping partition.", e);
             }
         } catch (MetaException e) {
             throw new HCatException("MetaException while dropping partition.",
-                    e);
+                e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while dropping partition.", e);
+                "TException while dropping partition.", e);
         }
     }
 
     @Override
     public List<HCatPartition> listPartitionsByFilter(String dbName,
-            String tblName, String filter) throws HCatException,
-            ConnectionFailureException {
+                                                      String tblName, String filter) throws HCatException,
+        ConnectionFailureException {
         List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
         try {
             List<Partition> hivePtns = hmsClient.listPartitionsByFilter(
-                    checkDB(dbName), tblName, filter, (short) -1);
+                checkDB(dbName), tblName, filter, (short) -1);
             for (Partition ptn : hivePtns) {
                 hcatPtns.add(new HCatPartition(ptn));
             }
         } catch (MetaException e) {
             throw new HCatException("MetaException while fetching partitions.",
-                    e);
+                e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while fetching partitions.", e);
+                "NoSuchObjectException while fetching partitions.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while fetching partitions.", e);
+                "TException while fetching partitions.", e);
         }
         return hcatPtns;
     }
 
     @Override
     public void markPartitionForEvent(String dbName, String tblName,
-            Map<String, String> partKVs, PartitionEventType eventType)
-            throws HCatException, ConnectionFailureException {
+                                      Map<String, String> partKVs, PartitionEventType eventType)
+        throws HCatException, ConnectionFailureException {
         try {
             hmsClient.markPartitionForEvent(checkDB(dbName), tblName, partKVs,
-                    eventType);
+                eventType);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while marking partition for event.", e);
+                "MetaException while marking partition for event.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while marking partition for event.",
-                    e);
+                "NoSuchObjectException while marking partition for event.",
+                e);
         } catch (UnknownTableException e) {
             throw new HCatException(
-                    "UnknownTableException while marking partition for event.",
-                    e);
+                "UnknownTableException while marking partition for event.",
+                e);
         } catch (UnknownDBException e) {
             throw new HCatException(
-                    "UnknownDBException while marking partition for event.", e);
+                "UnknownDBException while marking partition for event.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while marking partition for event.", e);
+                "TException while marking partition for event.", e);
         } catch (InvalidPartitionException e) {
             throw new HCatException(
-                    "InvalidPartitionException while marking partition for event.",
-                    e);
+                "InvalidPartitionException while marking partition for event.",
+                e);
         } catch (UnknownPartitionException e) {
             throw new HCatException(
-                    "UnknownPartitionException while marking partition for event.",
-                    e);
+                "UnknownPartitionException while marking partition for event.",
+                e);
         }
     }
 
     @Override
     public boolean isPartitionMarkedForEvent(String dbName, String tblName,
-            Map<String, String> partKVs, PartitionEventType eventType)
-            throws HCatException, ConnectionFailureException {
+                                             Map<String, String> partKVs, PartitionEventType eventType)
+        throws HCatException, ConnectionFailureException {
         boolean isMarked = false;
         try {
             isMarked = hmsClient.isPartitionMarkedForEvent(checkDB(dbName),
-                    tblName, partKVs, eventType);
+                tblName, partKVs, eventType);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while checking partition for event.", e);
+                "MetaException while checking partition for event.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException(
-                    "NoSuchObjectException while checking partition for event.",
-                    e);
+                "NoSuchObjectException while checking partition for event.",
+                e);
         } catch (UnknownTableException e) {
             throw new HCatException(
-                    "UnknownTableException while checking partition for event.",
-                    e);
+                "UnknownTableException while checking partition for event.",
+                e);
         } catch (UnknownDBException e) {
             throw new HCatException(
-                    "UnknownDBException while checking partition for event.", e);
+                "UnknownDBException while checking partition for event.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while checking partition for event.", e);
+                "TException while checking partition for event.", e);
         } catch (InvalidPartitionException e) {
             throw new HCatException(
-                    "InvalidPartitionException while checking partition for event.",
-                    e);
+                "InvalidPartitionException while checking partition for event.",
+                e);
         } catch (UnknownPartitionException e) {
             throw new HCatException(
-                    "UnknownPartitionException while checking partition for event.",
-                    e);
+                "UnknownPartitionException while checking partition for event.",
+                e);
         }
         return isMarked;
     }
 
     @Override
     public String getDelegationToken(String owner,
-            String renewerKerberosPrincipalName) throws HCatException,
-            ConnectionFailureException {
+                                     String renewerKerberosPrincipalName) throws HCatException,
+        ConnectionFailureException {
         String token = null;
         try {
             token = hmsClient.getDelegationToken(owner,
-                    renewerKerberosPrincipalName);
+                renewerKerberosPrincipalName);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while getting delegation token.", e);
+                "MetaException while getting delegation token.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while getting delegation token.", e);
+                "TException while getting delegation token.", e);
         }
 
         return token;
@@ -504,16 +504,16 @@ public class HCatClientHMSImpl extends H
 
     @Override
     public long renewDelegationToken(String tokenStrForm) throws HCatException,
-            ConnectionFailureException {
+        ConnectionFailureException {
         long time = 0;
         try {
             time = hmsClient.renewDelegationToken(tokenStrForm);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while renewing delegation token.", e);
+                "MetaException while renewing delegation token.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while renewing delegation token.", e);
+                "TException while renewing delegation token.", e);
         }
 
         return time;
@@ -521,15 +521,15 @@ public class HCatClientHMSImpl extends H
 
     @Override
     public void cancelDelegationToken(String tokenStrForm)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         try {
             hmsClient.cancelDelegationToken(tokenStrForm);
         } catch (MetaException e) {
             throw new HCatException(
-                    "MetaException while canceling delegation token.", e);
+                "MetaException while canceling delegation token.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while canceling delegation token.", e);
+                "TException while canceling delegation token.", e);
         }
     }
 
@@ -542,38 +542,38 @@ public class HCatClientHMSImpl extends H
      */
     @Override
     void initialize(Configuration conf) throws HCatException,
-            ConnectionFailureException {
+        ConnectionFailureException {
         this.config = conf;
         try {
             hiveConfig = HCatUtil.getHiveConf(config);
             hmsClient = HCatUtil.getHiveClient(hiveConfig);
         } catch (MetaException exp) {
             throw new HCatException("MetaException while creating HMS client",
-                    exp);
+                exp);
         } catch (IOException exp) {
             throw new HCatException("IOException while creating HMS client",
-                    exp);
+                exp);
         }
 
     }
 
     private Table getHiveTableLike(String dbName, String existingTblName,
-            String newTableName, boolean isExternal, String location)
-            throws HCatException, ConnectionFailureException {
+                                   String newTableName, boolean isExternal, String location)
+        throws HCatException, ConnectionFailureException {
         Table oldtbl = null;
         Table newTable = null;
         try {
             oldtbl = hmsClient.getTable(checkDB(dbName), existingTblName);
         } catch (MetaException e1) {
             throw new HCatException(
-                    "MetaException while retrieving existing table.", e1);
+                "MetaException while retrieving existing table.", e1);
         } catch (TException e1) {
             throw new ConnectionFailureException(
-                    "TException while retrieving existing table.", e1);
+                "TException while retrieving existing table.", e1);
         } catch (NoSuchObjectException e1) {
             throw new HCatException(
-                    "NoSuchObjectException while retrieving existing table.",
-                    e1);
+                "NoSuchObjectException while retrieving existing table.",
+                e1);
         }
         if (oldtbl != null) {
             newTable = new Table();
@@ -626,7 +626,7 @@ public class HCatClientHMSImpl extends H
      */
     @Override
     public int addPartitions(List<HCatAddPartitionDesc> partInfoList)
-            throws HCatException, ConnectionFailureException {
+        throws HCatException, ConnectionFailureException {
         int numPartitions = -1;
         if ((partInfoList == null) || (partInfoList.size() == 0)) {
             throw new HCatException("The partition list is null or empty.");
@@ -635,7 +635,7 @@ public class HCatClientHMSImpl extends H
         Table tbl = null;
         try {
             tbl = hmsClient.getTable(partInfoList.get(0).getDatabaseName(),
-                    partInfoList.get(0).getTableName());
+                partInfoList.get(0).getTableName());
             ArrayList<Partition> ptnList = new ArrayList<Partition>();
             for (HCatAddPartitionDesc desc : partInfoList) {
                 ptnList.add(desc.toHivePartition(tbl));
@@ -643,19 +643,19 @@ public class HCatClientHMSImpl extends H
             numPartitions = hmsClient.add_partitions(ptnList);
         } catch (InvalidObjectException e) {
             throw new HCatException(
-                    "InvalidObjectException while adding partition.", e);
+                "InvalidObjectException while adding partition.", e);
         } catch (AlreadyExistsException e) {
             throw new HCatException(
-                    "AlreadyExistsException while adding partition.", e);
+                "AlreadyExistsException while adding partition.", e);
         } catch (MetaException e) {
             throw new HCatException("MetaException while adding partition.", e);
         } catch (TException e) {
             throw new ConnectionFailureException(
-                    "TException while adding partition.", e);
+                "TException while adding partition.", e);
         } catch (NoSuchObjectException e) {
             throw new HCatException("The table "
-                    + partInfoList.get(0).getTableName()
-                    + " is could not be found.", e);
+                + partInfoList.get(0).getTableName()
+                + " is could not be found.", e);
         }
         return numPartitions;
     }

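[Note: the HCatClientHMSImpl changes above are whitespace and indentation cleanup only; no behavior changes. For context, a minimal, illustrative sketch of how the HCatClient API implemented by this class is typically driven follows. HCatClient.create(Configuration) is assumed to be the public factory that hands back an HCatClientHMSImpl; the pattern strings are hypothetical.]

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hcatalog.api.HCatClient;

    public class HCatClientSketch {
        public static void main(String[] args) throws Exception {
            // HiveConf picks up hive-site.xml (metastore URIs etc.) from the classpath.
            Configuration conf = new HiveConf();
            // Assumed public factory; expected to return an HCatClientHMSImpl wired to a
            // HiveMetaStoreClient, as in initialize(Configuration) in the diff above.
            HCatClient client = HCatClient.create(conf);
            // listDatabaseNamesByPattern and listTableNamesByPattern appear in the diff.
            for (String db : client.listDatabaseNamesByPattern("*")) {
                for (String table : client.listTableNamesByPattern(db, "*")) {
                    System.out.println(db + "." + table);
                }
            }
        }
    }
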
Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateDBDesc.java Mon Sep 10 23:28:55 2012
@@ -47,7 +47,7 @@ public class HCatCreateDBDesc {
      *
      * @return the if not exists
      */
-    public boolean getIfNotExists(){
+    public boolean getIfNotExists() {
         return this.ifNotExits;
     }
 
@@ -78,19 +78,19 @@ public class HCatCreateDBDesc {
         return this.dbName;
     }
 
-    private HCatCreateDBDesc(String dbName){
-       this.dbName = dbName;
+    private HCatCreateDBDesc(String dbName) {
+        this.dbName = dbName;
     }
 
     @Override
     public String toString() {
         return "HCatCreateDBDesc ["
-                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-                + (locationUri != null ? "location=" + locationUri + ", "
-                        : "location=null")
-                + (comment != null ? "comment=" + comment + ", " : "comment=null")
-                + (dbProperties != null ? "dbProperties=" + dbProperties + ", "
-                        : "dbProperties=null") + "ifNotExits=" + ifNotExits + "]";
+            + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+            + (locationUri != null ? "location=" + locationUri + ", "
+            : "location=null")
+            + (comment != null ? "comment=" + comment + ", " : "comment=null")
+            + (dbProperties != null ? "dbProperties=" + dbProperties + ", "
+            : "dbProperties=null") + "ifNotExits=" + ifNotExits + "]";
     }
 
     /**
@@ -99,11 +99,11 @@ public class HCatCreateDBDesc {
      * @param dbName the db name
      * @return the builder
      */
-    public static Builder create(String dbName){
+    public static Builder create(String dbName) {
         return new Builder(dbName);
     }
 
-    Database toHiveDb(){
+    Database toHiveDb() {
         Database hiveDB = new Database();
         hiveDB.setDescription(this.comment);
         hiveDB.setLocationUri(this.locationUri);
@@ -120,7 +120,7 @@ public class HCatCreateDBDesc {
         private String dbName;
         private boolean ifNotExists = false;
 
-        private Builder(String dbName){
+        private Builder(String dbName) {
             this.dbName = dbName;
         }
 
@@ -130,7 +130,7 @@ public class HCatCreateDBDesc {
          * @param value the location of the database.
          * @return the builder
          */
-        public Builder location(String value){
+        public Builder location(String value) {
             this.innerLoc = value;
             return this;
         }
@@ -141,7 +141,7 @@ public class HCatCreateDBDesc {
          * @param value comments.
          * @return the builder
          */
-        public Builder comment(String value){
+        public Builder comment(String value) {
             this.innerComment = value;
             return this;
         }
@@ -152,7 +152,7 @@ public class HCatCreateDBDesc {
          * database with the same name already exists.
          * @return the builder
          */
-        public Builder ifNotExists(boolean ifNotExists){
+        public Builder ifNotExists(boolean ifNotExists) {
             this.ifNotExists = ifNotExists;
             return this;
         }
@@ -176,7 +176,7 @@ public class HCatCreateDBDesc {
          * @throws HCatException
          */
         public HCatCreateDBDesc build() throws HCatException {
-            if(this.dbName == null){
+            if (this.dbName == null) {
                 throw new HCatException("Database name cannot be null.");
             }
             HCatCreateDBDesc desc = new HCatCreateDBDesc(this.dbName);

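[Note: the HCatCreateDBDesc changes above are likewise formatting-only. A minimal sketch of the builder this class exposes, using the create/location/comment/ifNotExists/build methods shown in the diff; the database name and location are hypothetical, and client is an HCatClient as in the previous sketch.]

    import org.apache.hcatalog.api.HCatClient;
    import org.apache.hcatalog.api.HCatCreateDBDesc;

    public class CreateDbSketch {
        // Builds a database descriptor and hands it to HCatClient.createDatabase,
        // which converts it to a Hive Database via toHiveDb() as shown above.
        static void createSampleDb(HCatClient client) throws Exception {
            HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create("sample_db")      // hypothetical name
                .location("/user/hive/warehouse/sample_db")                     // hypothetical path
                .comment("Database created through the java-client API")
                .ifNotExists(true)
                .build();
            client.createDatabase(dbDesc);
        }
    }
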
Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java Mon Sep 10 23:28:55 2012
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
  * The Class HCatCreateTableDesc for defining attributes for a new table.
  */
 @SuppressWarnings("deprecation")
-public class HCatCreateTableDesc{
+public class HCatCreateTableDesc {
 
     private static final Logger LOG = LoggerFactory.getLogger(HCatCreateTableDesc.class);
 
@@ -72,7 +72,7 @@ public class HCatCreateTableDesc{
     private String serde;
     private String storageHandler;
 
-    private HCatCreateTableDesc(String dbName, String tableName, List<HCatFieldSchema> columns){
+    private HCatCreateTableDesc(String dbName, String tableName, List<HCatFieldSchema> columns) {
         this.dbName = dbName;
         this.tableName = tableName;
         this.cols = columns;
@@ -86,11 +86,11 @@ public class HCatCreateTableDesc{
      * @param columns the columns
      * @return the builder
      */
-    public static Builder create(String dbName, String tableName, List<HCatFieldSchema> columns){
+    public static Builder create(String dbName, String tableName, List<HCatFieldSchema> columns) {
         return new Builder(dbName, tableName, columns);
     }
 
-    Table toHiveTable(HiveConf conf) throws HCatException{
+    Table toHiveTable(HiveConf conf) throws HCatException {
 
         Table newTable = new Table();
         newTable.setDbName(dbName);
@@ -122,26 +122,26 @@ public class HCatCreateTableDesc{
             } else {
                 LOG.info("Using LazySimpleSerDe for table " + tableName);
                 sd.getSerdeInfo()
-                        .setSerializationLib(
-                                org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class
-                                        .getName());
+                    .setSerializationLib(
+                        org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class
+                            .getName());
             }
         } else {
             try {
                 LOG.info("Creating instance of storage handler to get input/output, serder info.");
                 HiveStorageHandler sh = HiveUtils.getStorageHandler(conf,
-                        storageHandler);
+                    storageHandler);
                 sd.setInputFormat(sh.getInputFormatClass().getName());
                 sd.setOutputFormat(sh.getOutputFormatClass().getName());
                 sd.getSerdeInfo().setSerializationLib(
-                        sh.getSerDeClass().getName());
+                    sh.getSerDeClass().getName());
                 newTable.putToParameters(
-                        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
-                        storageHandler);
+                    org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
+                    storageHandler);
             } catch (HiveException e) {
                 throw new HCatException(
-                        "Exception while creating instance of storage handler",
-                        e);
+                    "Exception while creating instance of storage handler",
+                    e);
             }
         }
         newTable.setSd(sd);
@@ -175,345 +175,345 @@ public class HCatCreateTableDesc{
         return newTable;
     }
 
-      /**
-       * Gets the if not exists.
-       *
-       * @return the if not exists
-       */
-      public boolean getIfNotExists() {
-          return this.ifNotExists;
-      }
-
-     /**
-      * Gets the table name.
-      *
-      * @return the table name
-      */
-     public String getTableName() {
-          return this.tableName;
-      }
-
-      /**
-       * Gets the cols.
-       *
-       * @return the cols
-       */
-      public List<HCatFieldSchema> getCols() {
-         return this.cols;
-      }
-
-      /**
-       * Gets the partition cols.
-       *
-       * @return the partition cols
-       */
-      public List<HCatFieldSchema> getPartitionCols() {
-          return this.partCols;
-      }
-
-      /**
-       * Gets the bucket cols.
-       *
-       * @return the bucket cols
-       */
-      public List<String> getBucketCols() {
-          return this.bucketCols;
-      }
-
-      public int getNumBuckets() {
-          return this.numBuckets;
-      }
-
-      /**
-       * Gets the comments.
-       *
-       * @return the comments
-       */
-      public String getComments() {
-          return this.comment;
-      }
-
-      /**
-       * Gets the storage handler.
-       *
-       * @return the storage handler
-       */
-      public String getStorageHandler() {
-          return this.storageHandler;
-      }
-
-      /**
-       * Gets the location.
-       *
-       * @return the location
-       */
-      public String getLocation() {
-          return this.location;
-      }
-
-      /**
-       * Gets the external.
-       *
-       * @return the external
-       */
-      public boolean getExternal() {
-          return this.isExternal;
-      }
-
-      /**
-       * Gets the sort cols.
-       *
-       * @return the sort cols
-       */
-      public List<Order> getSortCols() {
-          return this.sortCols;
-      }
-
-      /**
-       * Gets the tbl props.
-       *
-       * @return the tbl props
-       */
-      public Map<String, String> getTblProps() {
-          return this.tblProps;
-      }
-
-      /**
-       * Gets the file format.
-       *
-       * @return the file format
-       */
-      public String getFileFormat(){
-          return this.fileFormat;
-      }
-
-      /**
-       * Gets the database name.
-       *
-       * @return the database name
-       */
-      public String getDatabaseName() {
-          return this.dbName;
-      }
+    /**
+     * Gets the if not exists.
+     *
+     * @return the if not exists
+     */
+    public boolean getIfNotExists() {
+        return this.ifNotExists;
+    }
+
+    /**
+     * Gets the table name.
+     *
+     * @return the table name
+     */
+    public String getTableName() {
+        return this.tableName;
+    }
+
+    /**
+     * Gets the cols.
+     *
+     * @return the cols
+     */
+    public List<HCatFieldSchema> getCols() {
+        return this.cols;
+    }
+
+    /**
+     * Gets the partition cols.
+     *
+     * @return the partition cols
+     */
+    public List<HCatFieldSchema> getPartitionCols() {
+        return this.partCols;
+    }
+
+    /**
+     * Gets the bucket cols.
+     *
+     * @return the bucket cols
+     */
+    public List<String> getBucketCols() {
+        return this.bucketCols;
+    }
+
+    public int getNumBuckets() {
+        return this.numBuckets;
+    }
+
+    /**
+     * Gets the comments.
+     *
+     * @return the comments
+     */
+    public String getComments() {
+        return this.comment;
+    }
+
+    /**
+     * Gets the storage handler.
+     *
+     * @return the storage handler
+     */
+    public String getStorageHandler() {
+        return this.storageHandler;
+    }
+
+    /**
+     * Gets the location.
+     *
+     * @return the location
+     */
+    public String getLocation() {
+        return this.location;
+    }
+
+    /**
+     * Gets the external.
+     *
+     * @return the external
+     */
+    public boolean getExternal() {
+        return this.isExternal;
+    }
 
-      @Override
+    /**
+     * Gets the sort cols.
+     *
+     * @return the sort cols
+     */
+    public List<Order> getSortCols() {
+        return this.sortCols;
+    }
+
+    /**
+     * Gets the tbl props.
+     *
+     * @return the tbl props
+     */
+    public Map<String, String> getTblProps() {
+        return this.tblProps;
+    }
+
+    /**
+     * Gets the file format.
+     *
+     * @return the file format
+     */
+    public String getFileFormat() {
+        return this.fileFormat;
+    }
+
+    /**
+     * Gets the database name.
+     *
+     * @return the database name
+     */
+    public String getDatabaseName() {
+        return this.dbName;
+    }
+
+    @Override
     public String toString() {
         return "HCatCreateTableDesc ["
-                + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
-                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-                + "isExternal="
-                + isExternal
-                + ", "
-                + (comment != null ? "comment=" + comment + ", " : "comment=null")
-                + (location != null ? "location=" + location + ", " : "location=null")
-                + (cols != null ? "cols=" + cols + ", " : "cols=null")
-                + (partCols != null ? "partCols=" + partCols + ", " : "partCols=null")
-                + (bucketCols != null ? "bucketCols=" + bucketCols + ", " : "bucketCols=null")
-                + "numBuckets="
-                + numBuckets
-                + ", "
-                + (sortCols != null ? "sortCols=" + sortCols + ", " : "sortCols=null")
-                + (tblProps != null ? "tblProps=" + tblProps + ", " : "tblProps=null")
-                + "ifNotExists="
-                + ifNotExists
-                + ", "
-                + (fileFormat != null ? "fileFormat=" + fileFormat + ", " : "fileFormat=null")
-                + (inputformat != null ? "inputformat=" + inputformat + ", "
-                        : "inputformat=null")
-                + (outputformat != null ? "outputformat=" + outputformat + ", "
-                        : "outputformat=null")
-                + (serde != null ? "serde=" + serde + ", " : "serde=null")
-                + (storageHandler != null ? "storageHandler=" + storageHandler
-                        : "storageHandler=null") + "]";
-    }
-
-    public static class Builder{
-
-          private String tableName;
-          private boolean isExternal;
-          private List<HCatFieldSchema> cols;
-          private List<HCatFieldSchema> partCols;
-          private List<String> bucketCols;
-          private List<Order> sortCols;
-          private int numBuckets;
-          private String comment;
-          private String fileFormat;
-          private String location;
-          private String storageHandler;
-          private Map<String, String> tblProps;
-          private boolean ifNotExists;
-          private String dbName;
-
-
-          private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) {
-              this.dbName = dbName;
-              this.tableName = tableName;
-              this.cols = columns;
-          }
-
-
-          /**
-           * If not exists.
-           *
-           * @param ifNotExists If set to true, hive will not throw exception, if a
-           * table with the same name already exists.
-           * @return the builder
-           */
-          public Builder ifNotExists(boolean ifNotExists) {
+            + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
+            + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+            + "isExternal="
+            + isExternal
+            + ", "
+            + (comment != null ? "comment=" + comment + ", " : "comment=null")
+            + (location != null ? "location=" + location + ", " : "location=null")
+            + (cols != null ? "cols=" + cols + ", " : "cols=null")
+            + (partCols != null ? "partCols=" + partCols + ", " : "partCols=null")
+            + (bucketCols != null ? "bucketCols=" + bucketCols + ", " : "bucketCols=null")
+            + "numBuckets="
+            + numBuckets
+            + ", "
+            + (sortCols != null ? "sortCols=" + sortCols + ", " : "sortCols=null")
+            + (tblProps != null ? "tblProps=" + tblProps + ", " : "tblProps=null")
+            + "ifNotExists="
+            + ifNotExists
+            + ", "
+            + (fileFormat != null ? "fileFormat=" + fileFormat + ", " : "fileFormat=null")
+            + (inputformat != null ? "inputformat=" + inputformat + ", "
+            : "inputformat=null")
+            + (outputformat != null ? "outputformat=" + outputformat + ", "
+            : "outputformat=null")
+            + (serde != null ? "serde=" + serde + ", " : "serde=null")
+            + (storageHandler != null ? "storageHandler=" + storageHandler
+            : "storageHandler=null") + "]";
+    }
+
+    public static class Builder {
+
+        private String tableName;
+        private boolean isExternal;
+        private List<HCatFieldSchema> cols;
+        private List<HCatFieldSchema> partCols;
+        private List<String> bucketCols;
+        private List<Order> sortCols;
+        private int numBuckets;
+        private String comment;
+        private String fileFormat;
+        private String location;
+        private String storageHandler;
+        private Map<String, String> tblProps;
+        private boolean ifNotExists;
+        private String dbName;
+
+
+        private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) {
+            this.dbName = dbName;
+            this.tableName = tableName;
+            this.cols = columns;
+        }
+
+
+        /**
+         * If not exists.
+         *
+         * @param ifNotExists If set to true, hive will not throw exception, if a
+         * table with the same name already exists.
+         * @return the builder
+         */
+        public Builder ifNotExists(boolean ifNotExists) {
             this.ifNotExists = ifNotExists;
             return this;
-          }
+        }
+
+
+        /**
+         * Partition cols.
+         *
+         * @param partCols the partition cols
+         * @return the builder
+         */
+        public Builder partCols(ArrayList<HCatFieldSchema> partCols) {
+            this.partCols = partCols;
+            return this;
+        }
 
 
-          /**
-           * Partition cols.
-           *
-           * @param partCols the partition cols
-           * @return the builder
-           */
-          public Builder partCols(ArrayList<HCatFieldSchema> partCols) {
-              this.partCols = partCols;
-              return this;
-          }
-
-
-          /**
-           * Bucket cols.
-           *
-           * @param bucketCols the bucket cols
-           * @return the builder
-           */
-          public Builder bucketCols(ArrayList<String> bucketCols, int buckets) {
+        /**
+         * Bucket cols.
+         *
+         * @param bucketCols the bucket cols
+         * @return the builder
+         */
+        public Builder bucketCols(ArrayList<String> bucketCols, int buckets) {
             this.bucketCols = bucketCols;
             this.numBuckets = buckets;
             return this;
-          }
+        }
 
-          /**
-           * Storage handler.
-           *
-           * @param storageHandler the storage handler
-           * @return the builder
-           */
-          public Builder storageHandler(String storageHandler) {
+        /**
+         * Storage handler.
+         *
+         * @param storageHandler the storage handler
+         * @return the builder
+         */
+        public Builder storageHandler(String storageHandler) {
             this.storageHandler = storageHandler;
             return this;
-          }
+        }
 
-          /**
-           * Location.
-           *
-           * @param location the location
-           * @return the builder
-           */
-          public Builder location(String location) {
+        /**
+         * Location.
+         *
+         * @param location the location
+         * @return the builder
+         */
+        public Builder location(String location) {
             this.location = location;
             return this;
-          }
+        }
 
-          /**
-           * Comments.
-           *
-           * @param comment the comment
-           * @return the builder
-           */
-          public Builder comments(String comment) {
+        /**
+         * Comments.
+         *
+         * @param comment the comment
+         * @return the builder
+         */
+        public Builder comments(String comment) {
             this.comment = comment;
             return this;
-          }
+        }
 
-          /**
-           * Checks if is table external.
-           *
-           * @param isExternal the is external
-           * @return the builder
-           */
-          public Builder isTableExternal(boolean isExternal) {
+        /**
+         * Checks if is table external.
+         *
+         * @param isExternal the is external
+         * @return the builder
+         */
+        public Builder isTableExternal(boolean isExternal) {
             this.isExternal = isExternal;
             return this;
-          }
+        }
 
-          /**
-           * Sort cols.
-           *
-           * @param sortCols the sort cols
-           * @return the builder
-           */
-          public Builder sortCols(ArrayList<Order> sortCols) {
+        /**
+         * Sort cols.
+         *
+         * @param sortCols the sort cols
+         * @return the builder
+         */
+        public Builder sortCols(ArrayList<Order> sortCols) {
             this.sortCols = sortCols;
             return this;
-          }
+        }
 
-          /**
-           * Tbl props.
-           *
-           * @param tblProps the tbl props
-           * @return the builder
-           */
-          public Builder tblProps(Map<String, String> tblProps) {
+        /**
+         * Tbl props.
+         *
+         * @param tblProps the tbl props
+         * @return the builder
+         */
+        public Builder tblProps(Map<String, String> tblProps) {
             this.tblProps = tblProps;
             return this;
-          }
+        }
+
+        /**
+         * File format.
+         *
+         * @param format the format
+         * @return the builder
+         */
+        public Builder fileFormat(String format) {
+            this.fileFormat = format;
+            return this;
+        }
 
-          /**
-           * File format.
-           *
-           * @param format the format
-           * @return the builder
-           */
-          public Builder fileFormat(String format){
-              this.fileFormat = format;
-              return this;
-          }
-
-          /**
-           * Builds the HCatCreateTableDesc.
-           *
-           * @return HCatCreateTableDesc
-           * @throws HCatException
-           */
-          public HCatCreateTableDesc build() throws HCatException {
-              if(this.dbName == null){
+        /**
+         * Builds the HCatCreateTableDesc.
+         *
+         * @return HCatCreateTableDesc
+         * @throws HCatException
+         */
+        public HCatCreateTableDesc build() throws HCatException {
+            if (this.dbName == null) {
                 LOG.info("Database name found null. Setting db to :"
-                        + MetaStoreUtils.DEFAULT_DATABASE_NAME);
+                    + MetaStoreUtils.DEFAULT_DATABASE_NAME);
                 this.dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
-              }
-              HCatCreateTableDesc desc = new HCatCreateTableDesc(this.dbName,
-                      this.tableName, this.cols);
-              desc.ifNotExists = this.ifNotExists;
-              desc.isExternal = this.isExternal;
-              desc.comment = this.comment;
-              desc.partCols = this.partCols;
-              desc.bucketCols = this.bucketCols;
-              desc.numBuckets = this.numBuckets;
-              desc.location = this.location;
-              desc.tblProps = this.tblProps;
-              desc.sortCols = this.sortCols;
-              desc.serde = null;
-              if (!StringUtils.isEmpty(fileFormat)) {
-                  desc.fileFormat = fileFormat;
-                  if ("SequenceFile".equalsIgnoreCase(fileFormat)) {
-                      desc.inputformat = SequenceFileInputFormat.class.getName();
-                      desc.outputformat = SequenceFileOutputFormat.class
-                              .getName();
-                  } else if ("RCFile".equalsIgnoreCase(fileFormat)) {
-                      desc.inputformat = RCFileInputFormat.class.getName();
-                      desc.outputformat = RCFileOutputFormat.class.getName();
-                      desc.serde = ColumnarSerDe.class.getName();
-                  }
-                  desc.storageHandler = StringUtils.EMPTY;
-              } else if (!StringUtils.isEmpty(storageHandler)) {
-                  desc.storageHandler = storageHandler;
-              } else {
-                  desc.fileFormat = "TextFile";
-                  LOG.info("Using text file format for the table.");
-                  desc.inputformat = TextInputFormat.class.getName();
-                  LOG.info("Table input format:" + desc.inputformat);
-                  desc.outputformat = IgnoreKeyTextOutputFormat.class
-                          .getName();
-                  LOG.info("Table output format:" + desc.outputformat);
-              }
-              return desc;
-          }
-      }
+            }
+            HCatCreateTableDesc desc = new HCatCreateTableDesc(this.dbName,
+                this.tableName, this.cols);
+            desc.ifNotExists = this.ifNotExists;
+            desc.isExternal = this.isExternal;
+            desc.comment = this.comment;
+            desc.partCols = this.partCols;
+            desc.bucketCols = this.bucketCols;
+            desc.numBuckets = this.numBuckets;
+            desc.location = this.location;
+            desc.tblProps = this.tblProps;
+            desc.sortCols = this.sortCols;
+            desc.serde = null;
+            if (!StringUtils.isEmpty(fileFormat)) {
+                desc.fileFormat = fileFormat;
+                if ("SequenceFile".equalsIgnoreCase(fileFormat)) {
+                    desc.inputformat = SequenceFileInputFormat.class.getName();
+                    desc.outputformat = SequenceFileOutputFormat.class
+                        .getName();
+                } else if ("RCFile".equalsIgnoreCase(fileFormat)) {
+                    desc.inputformat = RCFileInputFormat.class.getName();
+                    desc.outputformat = RCFileOutputFormat.class.getName();
+                    desc.serde = ColumnarSerDe.class.getName();
+                }
+                desc.storageHandler = StringUtils.EMPTY;
+            } else if (!StringUtils.isEmpty(storageHandler)) {
+                desc.storageHandler = storageHandler;
+            } else {
+                desc.fileFormat = "TextFile";
+                LOG.info("Using text file format for the table.");
+                desc.inputformat = TextInputFormat.class.getName();
+                LOG.info("Table input format:" + desc.inputformat);
+                desc.outputformat = IgnoreKeyTextOutputFormat.class
+                    .getName();
+                LOG.info("Table output format:" + desc.outputformat);
+            }
+            return desc;
+        }
+    }
 }
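
For context on how the Builder above is meant to be driven, the sketch below chains its setters the same way TestHCatClient (further down in this commit) does. It is a minimal illustration, not part of the patch: the database/table names and the hcatConf variable are placeholders, and the checked HCatException is left unhandled. As the build() method above shows, omitting both fileFormat and storageHandler falls back to TextFile with IgnoreKeyTextOutputFormat.

    import java.util.ArrayList;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hcatalog.api.HCatClient;
    import org.apache.hcatalog.api.HCatCreateTableDesc;
    import org.apache.hcatalog.data.schema.HCatFieldSchema;
    import org.apache.hcatalog.data.schema.HCatFieldSchema.Type;

    // Assemble the column schema (column names and comments are illustrative).
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));

    // create(...) hands back the Builder; build() resolves the storage formats.
    HCatCreateTableDesc desc = HCatCreateTableDesc
        .create("mydb", "weblogs", cols)
        .comments("example table")
        .isTableExternal(false)
        .fileFormat("rcfile")      // picks RCFileInputFormat/RCFileOutputFormat + ColumnarSerDe
        .build();

    // hcatConf: a caller-supplied HiveConf, as in TestHCatClient below.
    HCatClient client = HCatClient.create(new Configuration(hcatConf));
    client.createTable(desc);
    client.close();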

Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatDatabase.java Mon Sep 10 23:28:55 2012
@@ -44,7 +44,7 @@ public class HCatDatabase {
      *
      * @return the database name
      */
-    public String getName(){
+    public String getName() {
         return dbName;
     }
 
@@ -53,7 +53,7 @@ public class HCatDatabase {
      *
      * @return the dB location
      */
-    public String getLocation(){
+    public String getLocation() {
         return dbLocation;
     }
 
@@ -62,7 +62,7 @@ public class HCatDatabase {
      *
      * @return the comment
      */
-    public String getComment(){
+    public String getComment() {
         return comment;
     }
 
@@ -71,17 +71,17 @@ public class HCatDatabase {
      *
      * @return the dB properties
      */
-    public Map<String, String> getProperties(){
+    public Map<String, String> getProperties() {
         return props;
     }
 
     @Override
     public String toString() {
         return "HCatDatabase ["
-                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-                + (dbLocation != null ? "dbLocation=" + dbLocation + ", " : "dbLocation=null")
-                + (comment != null ? "comment=" + comment + ", " : "comment=null")
-                + (props != null ? "props=" + props : "props=null") + "]";
+            + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+            + (dbLocation != null ? "dbLocation=" + dbLocation + ", " : "dbLocation=null")
+            + (comment != null ? "comment=" + comment + ", " : "comment=null")
+            + (props != null ? "props=" + props : "props=null") + "]";
     }
 
 }

Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java Mon Sep 10 23:28:55 2012
@@ -41,7 +41,7 @@ public class HCatPartition {
     private int createTime;
     private int lastAccessTime;
     private StorageDescriptor sd;
-    private Map<String,String> parameters;
+    private Map<String, String> parameters;
 
     HCatPartition(Partition partition) throws HCatException {
         this.tableName = partition.getTableName();
@@ -62,7 +62,7 @@ public class HCatPartition {
      *
      * @return the table name
      */
-    public String getTableName(){
+    public String getTableName() {
         return this.tableName;
     }
 
@@ -71,7 +71,7 @@ public class HCatPartition {
      *
      * @return the database name
      */
-    public String getDatabaseName(){
+    public String getDatabaseName() {
         return this.dbName;
     }
 
@@ -80,8 +80,8 @@ public class HCatPartition {
      *
      * @return the columns
      */
-    public List<HCatFieldSchema> getColumns(){
-       return this.tableCols;
+    public List<HCatFieldSchema> getColumns() {
+        return this.tableCols;
     }
 
     /**
@@ -89,7 +89,7 @@ public class HCatPartition {
      *
      * @return the input format
      */
-    public String getInputFormat(){
+    public String getInputFormat() {
         return this.sd.getInputFormat();
     }
 
@@ -98,7 +98,7 @@ public class HCatPartition {
      *
      * @return the output format
      */
-    public String getOutputFormat(){
+    public String getOutputFormat() {
         return this.sd.getOutputFormat();
     }
 
@@ -109,8 +109,8 @@ public class HCatPartition {
      */
     public String getStorageHandler() {
         return this.sd
-                .getParameters()
-                .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+            .getParameters()
+            .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
     }
 
     /**
@@ -118,7 +118,7 @@ public class HCatPartition {
      *
      * @return the location
      */
-    public String getLocation(){
+    public String getLocation() {
         return this.sd.getLocation();
     }
 
@@ -127,7 +127,7 @@ public class HCatPartition {
      *
      * @return the serde
      */
-    public String getSerDe(){
+    public String getSerDe() {
         return this.sd.getSerdeInfo().getSerializationLib();
     }
 
@@ -140,7 +140,7 @@ public class HCatPartition {
      *
      * @return the last access time
      */
-    public int getLastAccessTime(){
+    public int getLastAccessTime() {
         return this.lastAccessTime;
     }
 
@@ -158,7 +158,7 @@ public class HCatPartition {
      *
      * @return the values
      */
-    public List<String> getValues(){
+    public List<String> getValues() {
         return this.values;
     }
 
@@ -167,7 +167,7 @@ public class HCatPartition {
      *
      * @return the bucket columns
      */
-    public List<String> getBucketCols(){
+    public List<String> getBucketCols() {
         return this.sd.getBucketCols();
     }
 
@@ -176,7 +176,7 @@ public class HCatPartition {
      *
      * @return the number of buckets
      */
-    public int getNumBuckets(){
+    public int getNumBuckets() {
         return this.sd.getNumBuckets();
     }
 
@@ -185,19 +185,19 @@ public class HCatPartition {
      *
      * @return the sort columns
      */
-    public List<Order> getSortCols(){
+    public List<Order> getSortCols() {
         return this.sd.getSortCols();
     }
 
     @Override
     public String toString() {
         return "HCatPartition ["
-                + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
-                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-                + (values != null ? "values=" + values + ", " : "values=null")
-                + "createTime=" + createTime + ", lastAccessTime="
-                + lastAccessTime + ", " + (sd != null ? "sd=" + sd + ", " : "sd=null")
-                + (parameters != null ? "parameters=" + parameters : "parameters=null") + "]";
+            + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
+            + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+            + (values != null ? "values=" + values + ", " : "values=null")
+            + "createTime=" + createTime + ", lastAccessTime="
+            + lastAccessTime + ", " + (sd != null ? "sd=" + sd + ", " : "sd=null")
+            + (parameters != null ? "parameters=" + parameters : "parameters=null") + "]";
     }
 
 }

Modified: incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java Mon Sep 10 23:28:55 2012
@@ -66,9 +66,9 @@ public class HCatTable {
         inputFileFormat = hiveTable.getSd().getInputFormat();
         outputFileFormat = hiveTable.getSd().getOutputFormat();
         storageHandler = hiveTable
-                .getSd()
-                .getParameters()
-                .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+            .getSd()
+            .getParameters()
+            .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
         tblProps = hiveTable.getParameters();
         serde = hiveTable.getSd().getSerdeInfo().getSerializationLib();
         location = hiveTable.getSd().getLocation();
@@ -187,7 +187,7 @@ public class HCatTable {
      *
      * @return the serde lib
      */
-    public String getSerdeLib(){
+    public String getSerdeLib() {
         return serde;
     }
 
@@ -196,31 +196,31 @@ public class HCatTable {
      *
      * @return the location
      */
-    public String getLocation(){
+    public String getLocation() {
         return location;
     }
 
     @Override
     public String toString() {
         return "HCatTable ["
-                + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
-                + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
-                + (tabletype != null ? "tabletype=" + tabletype + ", " : "tabletype=null")
-                + (cols != null ? "cols=" + cols + ", " : "cols=null")
-                + (partCols != null ? "partCols=" + partCols + ", " : "partCols==null")
-                + (bucketCols != null ? "bucketCols=" + bucketCols + ", " : "bucketCols=null")
-                + (sortCols != null ? "sortCols=" + sortCols + ", " : "sortCols=null")
-                + "numBuckets="
-                + numBuckets
-                + ", "
-                + (inputFileFormat != null ? "inputFileFormat="
-                        + inputFileFormat + ", " : "inputFileFormat=null")
-                + (outputFileFormat != null ? "outputFileFormat="
-                        + outputFileFormat + ", " : "outputFileFormat=null")
-                + (storageHandler != null ? "storageHandler=" + storageHandler
-                        + ", " : "storageHandler=null")
-                + (tblProps != null ? "tblProps=" + tblProps + ", " : "tblProps=null")
-                + (serde != null ? "serde=" + serde + ", " : "serde=")
-                + (location != null ? "location=" + location : "location=") + "]";
+            + (tableName != null ? "tableName=" + tableName + ", " : "tableName=null")
+            + (dbName != null ? "dbName=" + dbName + ", " : "dbName=null")
+            + (tabletype != null ? "tabletype=" + tabletype + ", " : "tabletype=null")
+            + (cols != null ? "cols=" + cols + ", " : "cols=null")
+            + (partCols != null ? "partCols=" + partCols + ", " : "partCols==null")
+            + (bucketCols != null ? "bucketCols=" + bucketCols + ", " : "bucketCols=null")
+            + (sortCols != null ? "sortCols=" + sortCols + ", " : "sortCols=null")
+            + "numBuckets="
+            + numBuckets
+            + ", "
+            + (inputFileFormat != null ? "inputFileFormat="
+            + inputFileFormat + ", " : "inputFileFormat=null")
+            + (outputFileFormat != null ? "outputFileFormat="
+            + outputFileFormat + ", " : "outputFileFormat=null")
+            + (storageHandler != null ? "storageHandler=" + storageHandler
+            + ", " : "storageHandler=null")
+            + (tblProps != null ? "tblProps=" + tblProps + ", " : "tblProps=null")
+            + (serde != null ? "serde=" + serde + ", " : "serde=")
+            + (location != null ? "location=" + location : "location=") + "]";
     }
 }

Modified: incubator/hcatalog/trunk/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java (original)
+++ incubator/hcatalog/trunk/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java Mon Sep 10 23:28:55 2012
@@ -49,8 +49,8 @@ import static org.junit.Assert.assertTru
 
 public class TestHCatClient {
     private static final Logger LOG = LoggerFactory.getLogger(TestHCatClient.class);
-    private static final String msPort  = "20101";
-    private static HiveConf  hcatConf;
+    private static final String msPort = "20101";
+    private static HiveConf hcatConf;
     private static SecurityManager securityManager;
 
     private static class RunMS implements Runnable {
@@ -58,7 +58,7 @@ public class TestHCatClient {
         @Override
         public void run() {
             try {
-                HiveMetaStore.main(new String[] { "-v", "-p", msPort });
+                HiveMetaStore.main(new String[]{"-v", "-p", msPort});
             } catch (Throwable t) {
                 LOG.error("Exiting. Got exception from metastore: ", t);
             }
@@ -83,14 +83,14 @@ public class TestHCatClient {
         hcatConf = new HiveConf(TestHCatClient.class);
         hcatConf.set("hive.metastore.local", "false");
         hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:"
-                + msPort);
+            + msPort);
         hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
         hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                HCatSemanticAnalyzer.class.getName());
+            HCatSemanticAnalyzer.class.getName());
         hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
         hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
         hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname,
-                "false");
+            "false");
         System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
         System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
     }
@@ -104,7 +104,7 @@ public class TestHCatClient {
         client.dropDatabase(db, true, HCatClient.DROP_DB_MODE.CASCADE);
 
         HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(db).ifNotExists(false)
-                .build();
+            .build();
         client.createDatabase(dbDesc);
         List<String> dbNames = client.listDatabaseNamesByPattern("*");
         assertTrue(dbNames.contains("default"));
@@ -114,22 +114,22 @@ public class TestHCatClient {
         assertTrue(testDb.getComment() == null);
         assertTrue(testDb.getProperties().size() == 0);
         String warehouseDir = System
-                .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
+            .getProperty(ConfVars.METASTOREWAREHOUSE.varname, "/user/hive/warehouse");
         assertTrue(testDb.getLocation().equals(
-                "file:" + warehouseDir + "/" + db + ".db"));
+            "file:" + warehouseDir + "/" + db + ".db"));
         ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
         cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
         cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
         HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                .create(db, tableOne, cols).fileFormat("rcfile").build();
+            .create(db, tableOne, cols).fileFormat("rcfile").build();
         client.createTable(tableDesc);
         HCatTable table1 = client.getTable(db, tableOne);
         assertTrue(table1.getInputFileFormat().equalsIgnoreCase(
-                RCFileInputFormat.class.getName()));
+            RCFileInputFormat.class.getName()));
         assertTrue(table1.getOutputFileFormat().equalsIgnoreCase(
-                RCFileOutputFormat.class.getName()));
+            RCFileOutputFormat.class.getName()));
         assertTrue(table1.getSerdeLib().equalsIgnoreCase(
-                ColumnarSerDe.class.getName()));
+            ColumnarSerDe.class.getName()));
         assertTrue(table1.getCols().equals(cols));
         // Since "ifexists" was not set to true, trying to create the same table
         // again
@@ -138,20 +138,20 @@ public class TestHCatClient {
             client.createTable(tableDesc);
         } catch (HCatException e) {
             assertTrue(e.getMessage().contains(
-                    "AlreadyExistsException while creating table."));
+                "AlreadyExistsException while creating table."));
         }
 
         client.dropTable(db, tableOne, true);
         HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(db,
-                tableTwo, cols).build();
+            tableTwo, cols).build();
         client.createTable(tableDesc2);
         HCatTable table2 = client.getTable(db, tableTwo);
         assertTrue(table2.getInputFileFormat().equalsIgnoreCase(
-                TextInputFormat.class.getName()));
+            TextInputFormat.class.getName()));
         assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
-                IgnoreKeyTextOutputFormat.class.getName()));
+            IgnoreKeyTextOutputFormat.class.getName()));
         assertTrue(table2.getLocation().equalsIgnoreCase(
-                "file:" + warehouseDir + "/" + db + ".db/" + tableTwo));
+            "file:" + warehouseDir + "/" + db + ".db/" + tableTwo));
         client.close();
     }
 
@@ -163,48 +163,48 @@ public class TestHCatClient {
         client.dropDatabase(dbName, true, HCatClient.DROP_DB_MODE.CASCADE);
 
         HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(dbName)
-                .ifNotExists(true).build();
+            .ifNotExists(true).build();
         client.createDatabase(dbDesc);
         ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
         cols.add(new HCatFieldSchema("userid", Type.INT, "id columns"));
         cols.add(new HCatFieldSchema("viewtime", Type.BIGINT,
-                "view time columns"));
+            "view time columns"));
         cols.add(new HCatFieldSchema("pageurl", Type.STRING, ""));
         cols.add(new HCatFieldSchema("ip", Type.STRING,
-                "IP Address of the User"));
+            "IP Address of the User"));
 
         ArrayList<HCatFieldSchema> ptnCols = new ArrayList<HCatFieldSchema>();
         ptnCols.add(new HCatFieldSchema("dt", Type.STRING, "date column"));
         ptnCols.add(new HCatFieldSchema("country", Type.STRING,
-                "country column"));
+            "country column"));
         HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                .create(dbName, tableName, cols).fileFormat("sequencefile")
-                .partCols(ptnCols).build();
+            .create(dbName, tableName, cols).fileFormat("sequencefile")
+            .partCols(ptnCols).build();
         client.createTable(tableDesc);
 
         Map<String, String> firstPtn = new HashMap<String, String>();
         firstPtn.put("dt", "04/30/2012");
         firstPtn.put("country", "usa");
         HCatAddPartitionDesc addPtn = HCatAddPartitionDesc.create(dbName,
-                tableName, null, firstPtn).build();
+            tableName, null, firstPtn).build();
         client.addPartition(addPtn);
 
         Map<String, String> secondPtn = new HashMap<String, String>();
         secondPtn.put("dt", "04/12/2012");
         secondPtn.put("country", "brazil");
         HCatAddPartitionDesc addPtn2 = HCatAddPartitionDesc.create(dbName,
-                tableName, null, secondPtn).build();
+            tableName, null, secondPtn).build();
         client.addPartition(addPtn2);
 
         Map<String, String> thirdPtn = new HashMap<String, String>();
         thirdPtn.put("dt", "04/13/2012");
         thirdPtn.put("country", "argetina");
         HCatAddPartitionDesc addPtn3 = HCatAddPartitionDesc.create(dbName,
-                tableName, null, thirdPtn).build();
+            tableName, null, thirdPtn).build();
         client.addPartition(addPtn3);
 
         List<HCatPartition> ptnList = client.listPartitionsByFilter(dbName,
-                tableName, null);
+            tableName, null);
         assertTrue(ptnList.size() == 3);
 
         HCatPartition ptn = client.getPartition(dbName, tableName, firstPtn);
@@ -212,29 +212,29 @@ public class TestHCatClient {
 
         client.dropPartition(dbName, tableName, firstPtn, true);
         ptnList = client.listPartitionsByFilter(dbName,
-                tableName, null);
+            tableName, null);
         assertTrue(ptnList.size() == 2);
 
         List<HCatPartition> ptnListTwo = client.listPartitionsByFilter(dbName,
-                tableName, "country = \"argetina\"");
+            tableName, "country = \"argetina\"");
         assertTrue(ptnListTwo.size() == 1);
 
         client.markPartitionForEvent(dbName, tableName, thirdPtn,
-                PartitionEventType.LOAD_DONE);
+            PartitionEventType.LOAD_DONE);
         boolean isMarked = client.isPartitionMarkedForEvent(dbName, tableName,
-                thirdPtn, PartitionEventType.LOAD_DONE);
+            thirdPtn, PartitionEventType.LOAD_DONE);
         assertTrue(isMarked);
         client.close();
     }
 
     @Test
-    public void testDatabaseLocation() throws Exception{
+    public void testDatabaseLocation() throws Exception {
         HCatClient client = HCatClient.create(new Configuration(hcatConf));
         String dbName = "locationDB";
         client.dropDatabase(dbName, true, HCatClient.DROP_DB_MODE.CASCADE);
 
         HCatCreateDBDesc dbDesc = HCatCreateDBDesc.create(dbName)
-                .ifNotExists(true).location("/tmp/"+dbName).build();
+            .ifNotExists(true).location("/tmp/" + dbName).build();
         client.createDatabase(dbDesc);
         HCatDatabase newDB = client.getDatabase(dbName);
         assertTrue(newDB.getLocation().equalsIgnoreCase("file:/tmp/" + dbName));
@@ -253,12 +253,12 @@ public class TestHCatClient {
         cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
         cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
         HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                .create(null, tableName, cols).fileFormat("rcfile").build();
+            .create(null, tableName, cols).fileFormat("rcfile").build();
         client.createTable(tableDesc);
         // create a new table similar to previous one.
         client.createTableLike(null, tableName, cloneTable, true, false, null);
         List<String> tables = client.listTableNamesByPattern(null, "table*");
-        assertTrue(tables.size() ==2);
+        assertTrue(tables.size() == 2);
         client.close();
     }
 
@@ -273,12 +273,12 @@ public class TestHCatClient {
         cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
         cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
         HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                .create(null, tableName, cols).fileFormat("rcfile").build();
+            .create(null, tableName, cols).fileFormat("rcfile").build();
         client.createTable(tableDesc);
-        client.renameTable(null, tableName,newName);
+        client.renameTable(null, tableName, newName);
         try {
             client.getTable(null, tableName);
-        } catch(HCatException exp){
+        } catch (HCatException exp) {
             assertTrue(exp.getMessage().contains("NoSuchObjectException while fetching table"));
         }
         HCatTable newTable = client.getTable(null, newName);
@@ -299,7 +299,7 @@ public class TestHCatClient {
         cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
         try {
             HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                    .create(null, tableName, cols).fileFormat("rcfile").build();
+                .create(null, tableName, cols).fileFormat("rcfile").build();
             client.createTable(tableDesc);
         } catch (Exception exp) {
             isExceptionCaught = true;
@@ -309,7 +309,7 @@ public class TestHCatClient {
             String newName = "goodTable";
             client.dropTable(null, newName, true);
             HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc
-                    .create(null, newName, cols).fileFormat("rcfile").build();
+                .create(null, newName, cols).fileFormat("rcfile").build();
             client.createTable(tableDesc2);
             HCatTable newTable = client.getTable(null, newName);
             assertTrue(newTable != null);
@@ -332,7 +332,7 @@ public class TestHCatClient {
         cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
         try {
             HCatCreateTableDesc tableDesc = HCatCreateTableDesc
-                    .create(null, tableName, cols).fileFormat("rcfile").build();
+                .create(null, tableName, cols).fileFormat("rcfile").build();
             client.createTable(tableDesc);
             // The DB foo is non-existent.
             client.getTable("foo", tableName);
@@ -342,7 +342,7 @@ public class TestHCatClient {
             String newName = "goodTable";
             client.dropTable(null, newName, true);
             HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc
-                    .create(null, newName, cols).fileFormat("rcfile").build();
+                .create(null, newName, cols).fileFormat("rcfile").build();
             client.createTable(tableDesc2);
             HCatTable newTable = client.getTable(null, newName);
             assertTrue(newTable != null);

Modified: incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hadoop/mapred/TempletonJobTracker.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hadoop/mapred/TempletonJobTracker.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hadoop/mapred/TempletonJobTracker.java (original)
+++ incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hadoop/mapred/TempletonJobTracker.java Mon Sep 10 23:28:55 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
@@ -36,16 +37,15 @@ public class TempletonJobTracker {
     public TempletonJobTracker(UserGroupInformation ugi,
                                InetSocketAddress addr,
                                Configuration conf)
-        throws IOException
-    {
+        throws IOException {
         cnx = (JobSubmissionProtocol)
             RPC.getProxy(JobSubmissionProtocol.class,
-                         JobSubmissionProtocol.versionID,
-                         addr,
-                         ugi,
-                         conf,
-                         NetUtils.getSocketFactory(conf,
-                                                   JobSubmissionProtocol.class));
+                JobSubmissionProtocol.versionID,
+                addr,
+                ugi,
+                conf,
+                NetUtils.getSocketFactory(conf,
+                    JobSubmissionProtocol.class));
     }
 
     /**
@@ -54,8 +54,7 @@ public class TempletonJobTracker {
      * @return Profile of the job, or null if not found.
      */
     public JobProfile getJobProfile(JobID jobid)
-        throws IOException
-    {
+        throws IOException {
         return cnx.getJobProfile(jobid);
     }
 
@@ -65,8 +64,7 @@ public class TempletonJobTracker {
      * @return Status of the job, or null if not found.
      */
     public JobStatus getJobStatus(JobID jobid)
-        throws IOException
-    {
+        throws IOException {
         return cnx.getJobStatus(jobid);
     }
 
@@ -75,8 +73,7 @@ public class TempletonJobTracker {
      * Kill a job.
      */
     public void killJob(JobID jobid)
-        throws IOException
-    {
+        throws IOException {
         cnx.killJob(jobid);
     }
 
@@ -84,8 +81,7 @@ public class TempletonJobTracker {
      * Get all the jobs submitted.
      */
     public JobStatus[] getAllJobs()
-        throws IOException
-    {
+        throws IOException {
         return cnx.getAllJobs();
     }
 

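TempletonJobTracker above is a thin wrapper around a JobSubmissionProtocol RPC proxy; the sketch below shows the intended call pattern against the methods it exposes. It is illustrative only: the ugi, addr and conf values are assumed to come from the Templeton server's context, the job id string is made up, and IOExceptions are left unhandled.

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.JobStatus;
    import org.apache.hadoop.mapred.TempletonJobTracker;
    import org.apache.hadoop.security.UserGroupInformation;

    // ugi, addr and conf are placeholders for values the server already holds.
    TempletonJobTracker tracker = new TempletonJobTracker(ugi, addr, conf);

    JobID jobid = JobID.forName("job_201209100001_0001");   // example id only
    JobStatus status = tracker.getJobStatus(jobid);          // null if the job is unknown
    if (status != null && status.getRunState() == JobStatus.RUNNING) {
        tracker.killJob(jobid);
    }
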
Modified: incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java (original)
+++ incubator/hcatalog/trunk/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/AppConfig.java Mon Sep 10 23:28:55 2012
@@ -192,11 +192,14 @@ public class AppConfig extends Configura
 
     public long zkCleanupInterval()  {
         return getLong(ZooKeeperCleanup.ZK_CLEANUP_INTERVAL,
-                       (1000L * 60L * 60L * 12L)); }
-    public long zkMaxAge()           {
+            (1000L * 60L * 60L * 12L));
+    }
+
+    public long zkMaxAge() {
         return getLong(ZooKeeperCleanup.ZK_CLEANUP_MAX_AGE,
-                       (1000L * 60L * 60L * 24L * 7L)); }
+            (1000L * 60L * 60L * 24L * 7L));
+    }
+
     public String zkHosts()          { return get(ZooKeeperStorage.ZK_HOSTS); }
-    public int zkSessionTimeout()    { return getInt(ZooKeeperStorage.ZK_SESSION_TIMEOUT,
-                                                     30000); }
+    public int zkSessionTimeout()    { return getInt(ZooKeeperStorage.ZK_SESSION_TIMEOUT, 30000); }
 }
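
The ZooKeeper-related accessors above are typed Configuration lookups with built-in defaults. Assuming an AppConfig instance named appConf obtained elsewhere (its construction is outside this file), callers read the settings directly:

    // appConf: an existing AppConfig; the variable name is illustrative.
    long cleanupIntervalMs = appConf.zkCleanupInterval();   // defaults to 12 hours
    long maxAgeMs = appConf.zkMaxAge();                     // defaults to 7 days
    String zkHosts = appConf.zkHosts();                     // no default; null if unset
    int sessionTimeoutMs = appConf.zkSessionTimeout();      // defaults to 30000 ms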