Posted to hcatalog-commits@incubator.apache.org by tr...@apache.org on 2012/09/10 23:29:03 UTC

svn commit: r1383152 [7/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ s...

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java Mon Sep 10 23:28:55 2012
@@ -23,140 +23,140 @@ import org.apache.hadoop.mapred.Sequence
 
 public final class HCatConstants {
 
-  public static final String HIVE_RCFILE_IF_CLASS = "org.apache.hadoop.hive.ql.io.RCFileInputFormat";
-  public static final String HIVE_RCFILE_OF_CLASS = "org.apache.hadoop.hive.ql.io.RCFileOutputFormat";
+    public static final String HIVE_RCFILE_IF_CLASS = "org.apache.hadoop.hive.ql.io.RCFileInputFormat";
+    public static final String HIVE_RCFILE_OF_CLASS = "org.apache.hadoop.hive.ql.io.RCFileOutputFormat";
 
-  public static final String SEQUENCEFILE_INPUT = SequenceFileInputFormat.class.getName();
-  public static final String SEQUENCEFILE_OUTPUT = SequenceFileOutputFormat.class.getName();
+    public static final String SEQUENCEFILE_INPUT = SequenceFileInputFormat.class.getName();
+    public static final String SEQUENCEFILE_OUTPUT = SequenceFileOutputFormat.class.getName();
 
-  public static final String HCAT_PIG_STORAGE_CLASS = "org.apache.pig.builtin.PigStorage";
-  public static final String HCAT_PIG_LOADER = "hcat.pig.loader";
-  public static final String HCAT_PIG_LOADER_LOCATION_SET = HCAT_PIG_LOADER + ".location.set" ;
-  public static final String HCAT_PIG_LOADER_ARGS = "hcat.pig.loader.args";
-  public static final String HCAT_PIG_STORER = "hcat.pig.storer";
-  public static final String HCAT_PIG_STORER_ARGS = "hcat.pig.storer.args";
-  public static final String HCAT_PIG_ARGS_DELIMIT = "hcat.pig.args.delimiter";
-  public static final String HCAT_PIG_ARGS_DELIMIT_DEFAULT = ",";
-  public static final String HCAT_PIG_STORER_LOCATION_SET = HCAT_PIG_STORER + ".location.set" ;
-  public static final String HCAT_PIG_INNER_TUPLE_NAME = "hcat.pig.inner.tuple.name";
-  public static final String HCAT_PIG_INNER_TUPLE_NAME_DEFAULT = "innertuple";
-  public static final String HCAT_PIG_INNER_FIELD_NAME = "hcat.pig.inner.field.name";
-  public static final String HCAT_PIG_INNER_FIELD_NAME_DEFAULT = "innerfield";
-
-  //The keys used to store info into the job Configuration
-  public static final String HCAT_KEY_BASE = "mapreduce.lib.hcat";
+    public static final String HCAT_PIG_STORAGE_CLASS = "org.apache.pig.builtin.PigStorage";
+    public static final String HCAT_PIG_LOADER = "hcat.pig.loader";
+    public static final String HCAT_PIG_LOADER_LOCATION_SET = HCAT_PIG_LOADER + ".location.set";
+    public static final String HCAT_PIG_LOADER_ARGS = "hcat.pig.loader.args";
+    public static final String HCAT_PIG_STORER = "hcat.pig.storer";
+    public static final String HCAT_PIG_STORER_ARGS = "hcat.pig.storer.args";
+    public static final String HCAT_PIG_ARGS_DELIMIT = "hcat.pig.args.delimiter";
+    public static final String HCAT_PIG_ARGS_DELIMIT_DEFAULT = ",";
+    public static final String HCAT_PIG_STORER_LOCATION_SET = HCAT_PIG_STORER + ".location.set";
+    public static final String HCAT_PIG_INNER_TUPLE_NAME = "hcat.pig.inner.tuple.name";
+    public static final String HCAT_PIG_INNER_TUPLE_NAME_DEFAULT = "innertuple";
+    public static final String HCAT_PIG_INNER_FIELD_NAME = "hcat.pig.inner.field.name";
+    public static final String HCAT_PIG_INNER_FIELD_NAME_DEFAULT = "innerfield";
+
+    //The keys used to store info into the job Configuration
+    public static final String HCAT_KEY_BASE = "mapreduce.lib.hcat";
 
-  public static final String HCAT_KEY_OUTPUT_SCHEMA = HCAT_KEY_BASE + ".output.schema";
+    public static final String HCAT_KEY_OUTPUT_SCHEMA = HCAT_KEY_BASE + ".output.schema";
 
-  public static final String HCAT_KEY_JOB_INFO =  HCAT_KEY_BASE + ".job.info";
+    public static final String HCAT_KEY_JOB_INFO = HCAT_KEY_BASE + ".job.info";
 
-  // hcatalog specific configurations, that can be put in hive-site.xml
-  public static final String HCAT_HIVE_CLIENT_EXPIRY_TIME = "hcatalog.hive.client.cache.expiry.time";
+    // HCatalog-specific configurations that can be put in hive-site.xml
+    public static final String HCAT_HIVE_CLIENT_EXPIRY_TIME = "hcatalog.hive.client.cache.expiry.time";
 
     private HCatConstants() { // restrict instantiation
-  }
+    }
 
-  public static final String HCAT_TABLE_SCHEMA = "hcat.table.schema";
+    public static final String HCAT_TABLE_SCHEMA = "hcat.table.schema";
 
-  public static final String HCAT_METASTORE_URI = HiveConf.ConfVars.METASTOREURIS.varname;
+    public static final String HCAT_METASTORE_URI = HiveConf.ConfVars.METASTOREURIS.varname;
 
-  public static final String HCAT_PERMS = "hcat.perms";
+    public static final String HCAT_PERMS = "hcat.perms";
 
-  public static final String HCAT_GROUP = "hcat.group";
+    public static final String HCAT_GROUP = "hcat.group";
 
-  public static final String HCAT_CREATE_TBL_NAME = "hcat.create.tbl.name";
+    public static final String HCAT_CREATE_TBL_NAME = "hcat.create.tbl.name";
 
-  public static final String HCAT_CREATE_DB_NAME = "hcat.create.db.name";
+    public static final String HCAT_CREATE_DB_NAME = "hcat.create.db.name";
 
-  public static final String HCAT_METASTORE_PRINCIPAL
-          = HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname;
+    public static final String HCAT_METASTORE_PRINCIPAL
+        = HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname;
 
-  // IMPORTANT IMPORTANT IMPORTANT!!!!!
-  //The keys used to store info into the job Configuration.
-  //If any new keys are added, the HCatStorer needs to be updated. The HCatStorer
-  //updates the job configuration in the backend to insert these keys to avoid
-  //having to call setOutput from the backend (which would cause a metastore call
-  //from the map jobs)
-  public static final String HCAT_KEY_OUTPUT_BASE = "mapreduce.lib.hcatoutput";
-  public static final String HCAT_KEY_OUTPUT_INFO = HCAT_KEY_OUTPUT_BASE + ".info";
-  public static final String HCAT_KEY_HIVE_CONF = HCAT_KEY_OUTPUT_BASE + ".hive.conf";
-  public static final String HCAT_KEY_TOKEN_SIGNATURE = HCAT_KEY_OUTPUT_BASE + ".token.sig";
+    // IMPORTANT IMPORTANT IMPORTANT!!!!!
+    //The keys used to store info into the job Configuration.
+    //If any new keys are added, the HCatStorer needs to be updated. The HCatStorer
+    //updates the job configuration in the backend to insert these keys to avoid
+    //having to call setOutput from the backend (which would cause a metastore call
+    //from the map jobs)
+    public static final String HCAT_KEY_OUTPUT_BASE = "mapreduce.lib.hcatoutput";
+    public static final String HCAT_KEY_OUTPUT_INFO = HCAT_KEY_OUTPUT_BASE + ".info";
+    public static final String HCAT_KEY_HIVE_CONF = HCAT_KEY_OUTPUT_BASE + ".hive.conf";
+    public static final String HCAT_KEY_TOKEN_SIGNATURE = HCAT_KEY_OUTPUT_BASE + ".token.sig";
 
-  public static final String[] OUTPUT_CONFS_TO_SAVE = {
-    HCAT_KEY_OUTPUT_INFO,
-    HCAT_KEY_HIVE_CONF,
-    HCAT_KEY_TOKEN_SIGNATURE
+    public static final String[] OUTPUT_CONFS_TO_SAVE = {
+        HCAT_KEY_OUTPUT_INFO,
+        HCAT_KEY_HIVE_CONF,
+        HCAT_KEY_TOKEN_SIGNATURE
     };
 
 
-  public static final String HCAT_MSG_CLEAN_FREQ = "hcat.msg.clean.freq";
-  public static final String HCAT_MSG_EXPIRY_DURATION = "hcat.msg.expiry.duration";
+    public static final String HCAT_MSG_CLEAN_FREQ = "hcat.msg.clean.freq";
+    public static final String HCAT_MSG_EXPIRY_DURATION = "hcat.msg.expiry.duration";
 
-  public static final String HCAT_MSGBUS_TOPIC_NAME = "hcat.msgbus.topic.name";
-  public static final String HCAT_MSGBUS_TOPIC_NAMING_POLICY = "hcat.msgbus.topic.naming.policy";
-  public static final String HCAT_MSGBUS_TOPIC_PREFIX = "hcat.msgbus.topic.prefix";
-
-  public static final String HCAT_DYNAMIC_PTN_JOBID = HCAT_KEY_OUTPUT_BASE + "dynamic.jobid";
-  public static final boolean HCAT_IS_DYNAMIC_MAX_PTN_CHECK_ENABLED = false;
-
-  // Message Bus related properties.
-  public static final String HCAT_DEFAULT_TOPIC_PREFIX = "hcat";
-  public static final String HCAT_EVENT = "HCAT_EVENT";
-  public static final String HCAT_ADD_PARTITION_EVENT = "HCAT_ADD_PARTITION";
-  public static final String HCAT_DROP_PARTITION_EVENT = "HCAT_DROP_PARTITION";
-  public static final String HCAT_PARTITION_DONE_EVENT = "HCAT_PARTITION_DONE";
-  public static final String HCAT_ADD_TABLE_EVENT = "HCAT_ADD_TABLE";
-  public static final String HCAT_DROP_TABLE_EVENT = "HCAT_DROP_TABLE";
-  public static final String HCAT_ADD_DATABASE_EVENT = "HCAT_ADD_DATABASE";
-  public static final String HCAT_DROP_DATABASE_EVENT = "HCAT_DROP_DATABASE";
-
-  // System environment variables
-  public static final String SYSENV_HADOOP_TOKEN_FILE_LOCATION = "HADOOP_TOKEN_FILE_LOCATION";
-
-  // Hadoop Conf Var Names
-  public static final String CONF_MAPREDUCE_JOB_CREDENTIALS_BINARY = "mapreduce.job.credentials.binary";
-
-  //***************************************************************************
-  // Data-related configuration properties.
-  //***************************************************************************
-
-  /**
-   * {@value} (default: {@value #HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT}).
-   * Pig < 0.10.0 does not have boolean support, and scripts written for pre-boolean Pig versions
-   * will not expect boolean values when upgrading Pig. For integration the option is offered to
-   * convert boolean fields to integers by setting this Hadoop configuration key.
-   */
-  public static final String HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER =
-      "hcat.data.convert.boolean.to.integer";
-  public static final boolean HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT = false;
-
-  /**
-   * {@value} (default: {@value #HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT}).
-   * Hive tables support tinyint and smallint columns, while not all processing frameworks support
-   * these types (Pig only has integer for example). Enable this property to promote tinyint and
-   * smallint columns to integer at runtime. Note that writes to tinyint and smallint columns
-   * enforce bounds checking and jobs will fail if attempting to write values outside the column
-   * bounds.
-   */
-  public static final String HCAT_DATA_TINY_SMALL_INT_PROMOTION =
-      "hcat.data.tiny.small.int.promotion";
-  public static final boolean HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT = false;
-
-  /**
-   * {@value} (default: {@value #HCAT_INPUT_BAD_RECORD_THRESHOLD_DEFAULT}).
-   * Threshold for the ratio of bad records that will be silently skipped without causing a task
-   * failure. This is useful when processing large data sets with corrupt records, when its
-   * acceptable to skip some bad records.
-   */
-  public static final String HCAT_INPUT_BAD_RECORD_THRESHOLD_KEY = "hcat.input.bad.record.threshold";
-  public static final float HCAT_INPUT_BAD_RECORD_THRESHOLD_DEFAULT = 0.0001f;
-
-  /**
-   * {@value} (default: {@value #HCAT_INPUT_BAD_RECORD_MIN_DEFAULT}).
-   * Number of bad records that will be accepted before applying
-   * {@value #HCAT_INPUT_BAD_RECORD_THRESHOLD_KEY}. This is necessary to prevent an initial bad
-   * record from causing a task failure.
-   */
-  public static final String HCAT_INPUT_BAD_RECORD_MIN_KEY = "hcat.input.bad.record.min";
-  public static final int HCAT_INPUT_BAD_RECORD_MIN_DEFAULT = 2;
+    public static final String HCAT_MSGBUS_TOPIC_NAME = "hcat.msgbus.topic.name";
+    public static final String HCAT_MSGBUS_TOPIC_NAMING_POLICY = "hcat.msgbus.topic.naming.policy";
+    public static final String HCAT_MSGBUS_TOPIC_PREFIX = "hcat.msgbus.topic.prefix";
+
+    public static final String HCAT_DYNAMIC_PTN_JOBID = HCAT_KEY_OUTPUT_BASE + "dynamic.jobid";
+    public static final boolean HCAT_IS_DYNAMIC_MAX_PTN_CHECK_ENABLED = false;
+
+    // Message Bus related properties.
+    public static final String HCAT_DEFAULT_TOPIC_PREFIX = "hcat";
+    public static final String HCAT_EVENT = "HCAT_EVENT";
+    public static final String HCAT_ADD_PARTITION_EVENT = "HCAT_ADD_PARTITION";
+    public static final String HCAT_DROP_PARTITION_EVENT = "HCAT_DROP_PARTITION";
+    public static final String HCAT_PARTITION_DONE_EVENT = "HCAT_PARTITION_DONE";
+    public static final String HCAT_ADD_TABLE_EVENT = "HCAT_ADD_TABLE";
+    public static final String HCAT_DROP_TABLE_EVENT = "HCAT_DROP_TABLE";
+    public static final String HCAT_ADD_DATABASE_EVENT = "HCAT_ADD_DATABASE";
+    public static final String HCAT_DROP_DATABASE_EVENT = "HCAT_DROP_DATABASE";
+
+    // System environment variables
+    public static final String SYSENV_HADOOP_TOKEN_FILE_LOCATION = "HADOOP_TOKEN_FILE_LOCATION";
+
+    // Hadoop Conf Var Names
+    public static final String CONF_MAPREDUCE_JOB_CREDENTIALS_BINARY = "mapreduce.job.credentials.binary";
+
+    //***************************************************************************
+    // Data-related configuration properties.
+    //***************************************************************************
+
+    /**
+     * {@value} (default: {@value #HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT}).
+     * Pig < 0.10.0 does not have boolean support, and scripts written for pre-boolean Pig versions
+     * will not expect boolean values when upgrading Pig. For integration, the option is offered to
+     * convert boolean fields to integers by setting this Hadoop configuration key.
+     */
+    public static final String HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER =
+        "hcat.data.convert.boolean.to.integer";
+    public static final boolean HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT = false;
+
+    /**
+     * {@value} (default: {@value #HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT}).
+     * Hive tables support tinyint and smallint columns, while not all processing frameworks support
+     * these types (Pig only has integer for example). Enable this property to promote tinyint and
+     * smallint columns to integer at runtime. Note that writes to tinyint and smallint columns
+     * enforce bounds checking and jobs will fail if attempting to write values outside the column
+     * bounds.
+     */
+    public static final String HCAT_DATA_TINY_SMALL_INT_PROMOTION =
+        "hcat.data.tiny.small.int.promotion";
+    public static final boolean HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT = false;
+
+    /**
+     * {@value} (default: {@value #HCAT_INPUT_BAD_RECORD_THRESHOLD_DEFAULT}).
+     * Threshold for the ratio of bad records that will be silently skipped without causing a task
+     * failure. This is useful when processing large data sets with corrupt records, when it's
+     * acceptable to skip some bad records.
+     */
+    public static final String HCAT_INPUT_BAD_RECORD_THRESHOLD_KEY = "hcat.input.bad.record.threshold";
+    public static final float HCAT_INPUT_BAD_RECORD_THRESHOLD_DEFAULT = 0.0001f;
+
+    /**
+     * {@value} (default: {@value #HCAT_INPUT_BAD_RECORD_MIN_DEFAULT}).
+     * Number of bad records that will be accepted before applying
+     * {@value #HCAT_INPUT_BAD_RECORD_THRESHOLD_KEY}. This is necessary to prevent an initial bad
+     * record from causing a task failure.
+     */
+    public static final String HCAT_INPUT_BAD_RECORD_MIN_KEY = "hcat.input.bad.record.min";
+    public static final int HCAT_INPUT_BAD_RECORD_MIN_DEFAULT = 2;
 }
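
For context on the data-related keys above, here is a minimal, hypothetical sketch (not part of this commit) of a client job setting them on its Hadoop Configuration; the threshold value is illustrative only:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hcatalog.common.HCatConstants;

    Configuration conf = new Configuration();
    // Promote tinyint/smallint columns to int for frameworks that lack those types (e.g. Pig).
    conf.setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
    // Convert boolean fields to integers for pre-0.10.0 Pig scripts.
    conf.setBoolean(HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER, true);
    // Silently skip up to 0.1% bad input records, but only after the first 2 bad records.
    conf.setFloat(HCatConstants.HCAT_INPUT_BAD_RECORD_THRESHOLD_KEY, 0.001f);
    conf.setInt(HCatConstants.HCAT_INPUT_BAD_RECORD_MIN_KEY, 2);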

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatContext.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatContext.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatContext.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatContext.java Mon Sep 10 23:28:55 2012
@@ -27,28 +27,28 @@ import java.util.Map;
  */
 public class HCatContext {
 
-  private static final HCatContext hCatContext = new HCatContext();
+    private static final HCatContext hCatContext = new HCatContext();
 
-  private final Configuration conf;
+    private final Configuration conf;
 
-  private HCatContext() {
-    conf = new Configuration();
-  }
-
-  public static HCatContext getInstance() {
-    return hCatContext;
-  }
-
-  public Configuration getConf() {
-    return conf;
-  }
-
-  /**
-   * Merge the given configuration into the HCatContext conf, overwriting any existing keys.
-   */
-  public void mergeConf(Configuration conf) {
-    for (Map.Entry<String, String> entry : conf) {
-      this.conf.set(entry.getKey(), entry.getValue());
+    private HCatContext() {
+        conf = new Configuration();
+    }
+
+    public static HCatContext getInstance() {
+        return hCatContext;
+    }
+
+    public Configuration getConf() {
+        return conf;
+    }
+
+    /**
+     * Merge the given configuration into the HCatContext conf, overwriting any existing keys.
+     */
+    public void mergeConf(Configuration conf) {
+        for (Map.Entry<String, String> entry : conf) {
+            this.conf.set(entry.getKey(), entry.getValue());
+        }
     }
-  }
 }
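
As a usage note (illustrative, not part of this commit), HCatContext remains a process-wide singleton after the reindent: callers merge their job configuration in, with mergeConf() overwriting any existing keys, and read settings back out of the shared conf:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hcatalog.common.HCatConstants;
    import org.apache.hcatalog.common.HCatContext;

    Configuration jobConf = new Configuration();
    jobConf.setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    // Merge the job conf into the shared context conf, then read a setting back.
    HCatContext.getInstance().mergeConf(jobConf);
    boolean promote = HCatContext.getInstance().getConf().getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT);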

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatException.java Mon Sep 10 23:28:55 2012
@@ -24,134 +24,135 @@ import java.io.IOException;
  */
 public class HCatException extends IOException {
 
-  private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-  /** The error type enum for this exception. */
-  private final ErrorType errorType;
+    /** The error type enum for this exception. */
+    private final ErrorType errorType;
 
-  /**
-   * Instantiates a new hcat exception.
-   * @param errorType the error type
-   */
-  public HCatException(ErrorType errorType) {
-    this(errorType, null, null);
-  }
-
-
-  /**
-   * Instantiates a new hcat exception.
-   * @param errorType the error type
-   * @param cause the cause
-   */
-  public HCatException(ErrorType errorType, Throwable cause) {
-    this(errorType, null, cause);
-  }
-
-  /**
-   * Instantiates a new hcat exception.
-   * @param errorType the error type
-   * @param extraMessage extra messages to add to the message string
-   */
-  public HCatException(ErrorType errorType, String extraMessage) {
-    this(errorType, extraMessage, null);
-  }
-
-  /**
-   * Instantiates a new hcat exception.
-   * @param errorType the error type
-   * @param extraMessage extra messages to add to the message string
-   * @param cause the cause
-   */
-  public HCatException(ErrorType errorType, String extraMessage, Throwable cause) {
-    super(buildErrorMessage(
-        errorType,
-        extraMessage,
-        cause), cause);
-    this.errorType = errorType;
-  }
-
-
-  //TODO : remove default error type constructors after all exceptions
-  //are changed to use error types
-  /**
-   * Instantiates a new hcat exception.
-   * @param message the error message
-   */
-  public HCatException(String message) {
-    this(ErrorType.ERROR_INTERNAL_EXCEPTION, message, null);
-  }
-
-  /**
-   * Instantiates a new hcat exception.
-   * @param message the error message
-   * @param cause the cause
-   */
-  public HCatException(String message, Throwable cause) {
-    this(ErrorType.ERROR_INTERNAL_EXCEPTION, message, cause);
-  }
-
-
-  /**
-   * Builds the error message string. The error type message is appended with the extra message. If appendCause
-   * is true for the error type, then the message of the cause also is added to the message.
-   * @param type the error type
-   * @param extraMessage the extra message string
-   * @param cause the cause for the exception
-   * @return the exception message string
-   */
-  public static String buildErrorMessage(ErrorType type, String extraMessage, Throwable cause) {
-
-    //Initial message is just the error type message
-    StringBuffer message = new StringBuffer(HCatException.class.getName());
-    message.append(" : " + type.getErrorCode());
-    message.append(" : " + type.getErrorMessage());
-
-    if( extraMessage != null ) {
-      //Add the extra message value to buffer
-      message.append(" : " + extraMessage);
-    }
-
-    if( type.appendCauseMessage() ) {
-      if( cause != null ) {
-        //Add the cause message to buffer
-        message.append(". Cause : " + cause.toString());
-      }
-    }
-
-    return message.toString();
-  }
-
-
-  /**
-   * Is this a retriable error.
-   * @return is it retriable
-   */
-  public boolean isRetriable() {
-    return errorType.isRetriable();
-  }
-
-  /**
-   * Gets the error type.
-   * @return the error type enum
-   */
-  public ErrorType getErrorType() {
-    return errorType;
-  }
-
-  /**
-   * Gets the error code.
-   * @return the error code
-   */
-  public int getErrorCode() {
-    return errorType.getErrorCode();
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Throwable#toString()
-   */
-  @Override
-  public String toString() {
-    return getMessage();
-  }
+    /**
+     * Instantiates a new hcat exception.
+     * @param errorType the error type
+     */
+    public HCatException(ErrorType errorType) {
+        this(errorType, null, null);
+    }
+
+
+    /**
+     * Instantiates a new hcat exception.
+     * @param errorType the error type
+     * @param cause the cause
+     */
+    public HCatException(ErrorType errorType, Throwable cause) {
+        this(errorType, null, cause);
+    }
+
+    /**
+     * Instantiates a new hcat exception.
+     * @param errorType the error type
+     * @param extraMessage extra messages to add to the message string
+     */
+    public HCatException(ErrorType errorType, String extraMessage) {
+        this(errorType, extraMessage, null);
+    }
+
+    /**
+     * Instantiates a new hcat exception.
+     * @param errorType the error type
+     * @param extraMessage extra messages to add to the message string
+     * @param cause the cause
+     */
+    public HCatException(ErrorType errorType, String extraMessage, Throwable cause) {
+        super(buildErrorMessage(
+            errorType,
+            extraMessage,
+            cause), cause);
+        this.errorType = errorType;
+    }
+
+
+    //TODO : remove default error type constructors after all exceptions
+    //are changed to use error types
+
+    /**
+     * Instantiates a new hcat exception.
+     * @param message the error message
+     */
+    public HCatException(String message) {
+        this(ErrorType.ERROR_INTERNAL_EXCEPTION, message, null);
+    }
+
+    /**
+     * Instantiates a new hcat exception.
+     * @param message the error message
+     * @param cause the cause
+     */
+    public HCatException(String message, Throwable cause) {
+        this(ErrorType.ERROR_INTERNAL_EXCEPTION, message, cause);
+    }
+
+
+    /**
+     * Builds the error message string. The error type message is appended with the extra message. If appendCause
+     * is true for the error type, then the message of the cause is also added to the message.
+     * @param type the error type
+     * @param extraMessage the extra message string
+     * @param cause the cause for the exception
+     * @return the exception message string
+     */
+    public static String buildErrorMessage(ErrorType type, String extraMessage, Throwable cause) {
+
+        //Initial message is just the error type message
+        StringBuffer message = new StringBuffer(HCatException.class.getName());
+        message.append(" : " + type.getErrorCode());
+        message.append(" : " + type.getErrorMessage());
+
+        if (extraMessage != null) {
+            //Add the extra message value to buffer
+            message.append(" : " + extraMessage);
+        }
+
+        if (type.appendCauseMessage()) {
+            if (cause != null) {
+                //Add the cause message to buffer
+                message.append(". Cause : " + cause.toString());
+            }
+        }
+
+        return message.toString();
+    }
+
+
+    /**
+     * Is this a retriable error.
+     * @return is it retriable
+     */
+    public boolean isRetriable() {
+        return errorType.isRetriable();
+    }
+
+    /**
+     * Gets the error type.
+     * @return the error type enum
+     */
+    public ErrorType getErrorType() {
+        return errorType;
+    }
+
+    /**
+     * Gets the error code.
+     * @return the error code
+     */
+    public int getErrorCode() {
+        return errorType.getErrorCode();
+    }
+
+    /* (non-Javadoc)
+     * @see java.lang.Throwable#toString()
+     */
+    @Override
+    public String toString() {
+        return getMessage();
+    }
 
 }
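
To illustrate the constructor and accessor surface above, a hypothetical call site (not part of this commit; the column names are made up). An ErrorType plus an extra message builds the full message string, and callers can branch on retriability:

    import org.apache.hcatalog.common.ErrorType;
    import org.apache.hcatalog.common.HCatException;

    try {
        throw new HCatException(ErrorType.ERROR_SCHEMA_TYPE_MISMATCH,
            "Column <age>, expected <int>, got <string>");
    } catch (HCatException e) {
        // getMessage() looks like:
        // "org.apache.hcatalog.common.HCatException : <code> : <type message> : <extra message>"
        if (!e.isRetriable()) {
            System.err.println("Fatal HCat error " + e.getErrorCode() + ": " + e);
        }
    }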

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatUtil.java Mon Sep 10 23:28:55 2012
@@ -75,7 +75,7 @@ public class HCatUtil {
 
     private static final Logger LOG = LoggerFactory.getLogger(HCatUtil.class);
     private static volatile HiveClientCache hiveClientCache;
-    private final static int DEFAULT_HIVE_CACHE_EXPIRY_TIME_SECONDS = 2*60;
+    private final static int DEFAULT_HIVE_CACHE_EXPIRY_TIME_SECONDS = 2 * 60;
 
     public static boolean checkJobContextIfRunningFromBackend(JobContext j) {
         if (j.getConfiguration().get("mapred.task.id", "").equals("")) {
@@ -105,7 +105,7 @@ public class HCatUtil {
         }
         try {
             ByteArrayInputStream serialObj = new ByteArrayInputStream(
-                    decodeBytes(str));
+                decodeBytes(str));
             ObjectInputStream objStream = new ObjectInputStream(serialObj);
             return objStream.readObject();
         } catch (Exception e) {
@@ -136,9 +136,9 @@ public class HCatUtil {
     }
 
     public static List<HCatFieldSchema> getHCatFieldSchemaList(
-            FieldSchema... fields) throws HCatException {
+        FieldSchema... fields) throws HCatException {
         List<HCatFieldSchema> result = new ArrayList<HCatFieldSchema>(
-                fields.length);
+            fields.length);
 
         for (FieldSchema f : fields) {
             result.add(HCatSchemaUtils.getHCatFieldSchema(f));
@@ -148,7 +148,7 @@ public class HCatUtil {
     }
 
     public static List<HCatFieldSchema> getHCatFieldSchemaList(
-            List<FieldSchema> fields) throws HCatException {
+        List<FieldSchema> fields) throws HCatException {
         if (fields == null) {
             return null;
         } else {
@@ -169,7 +169,7 @@ public class HCatUtil {
     }
 
     public static List<FieldSchema> getFieldSchemaList(
-            List<HCatFieldSchema> hcatFields) {
+        List<HCatFieldSchema> hcatFields) {
         if (hcatFields == null) {
             return null;
         } else {
@@ -183,7 +183,7 @@ public class HCatUtil {
 
     public static Table getTable(HiveMetaStoreClient client, String dbName, String tableName)
         throws NoSuchObjectException, TException, MetaException {
-      return new Table(client.getTable(dbName, tableName));
+        return new Table(client.getTable(dbName, tableName));
     }
 
     public static HCatSchema getTableSchemaWithPtnCols(Table table) throws IOException {
@@ -229,7 +229,7 @@ public class HCatUtil {
      * @throws IOException Signals that an I/O exception has occurred.
      */
     public static List<FieldSchema> validatePartitionSchema(Table table,
-            HCatSchema partitionSchema) throws IOException {
+                                                            HCatSchema partitionSchema) throws IOException {
         Map<String, FieldSchema> partitionKeyMap = new HashMap<String, FieldSchema>();
 
         for (FieldSchema field : table.getPartitionKeys()) {
@@ -242,7 +242,7 @@ public class HCatUtil {
         for (int i = 0; i < partitionSchema.getFields().size(); i++) {
 
             FieldSchema field = HCatSchemaUtils.getFieldSchema(partitionSchema
-                    .getFields().get(i));
+                .getFields().get(i));
 
             FieldSchema tableField;
             if (i < tableCols.size()) {
@@ -250,19 +250,19 @@ public class HCatUtil {
 
                 if (!tableField.getName().equalsIgnoreCase(field.getName())) {
                     throw new HCatException(
-                            ErrorType.ERROR_SCHEMA_COLUMN_MISMATCH,
-                            "Expected column <" + tableField.getName()
-                                    + "> at position " + (i + 1)
-                                    + ", found column <" + field.getName()
-                                    + ">");
+                        ErrorType.ERROR_SCHEMA_COLUMN_MISMATCH,
+                        "Expected column <" + tableField.getName()
+                            + "> at position " + (i + 1)
+                            + ", found column <" + field.getName()
+                            + ">");
                 }
             } else {
                 tableField = partitionKeyMap.get(field.getName().toLowerCase());
 
                 if (tableField != null) {
                     throw new HCatException(
-                            ErrorType.ERROR_SCHEMA_PARTITION_KEY, "Key <"
-                                    + field.getName() + ">");
+                        ErrorType.ERROR_SCHEMA_PARTITION_KEY, "Key <"
+                        + field.getName() + ">");
                 }
             }
 
@@ -272,16 +272,16 @@ public class HCatUtil {
             } else {
                 // field present in both. validate type has not changed
                 TypeInfo partitionType = TypeInfoUtils
-                        .getTypeInfoFromTypeString(field.getType());
+                    .getTypeInfoFromTypeString(field.getType());
                 TypeInfo tableType = TypeInfoUtils
-                        .getTypeInfoFromTypeString(tableField.getType());
+                    .getTypeInfoFromTypeString(tableField.getType());
 
                 if (!partitionType.equals(tableType)) {
                     throw new HCatException(
-                            ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, "Column <"
-                                    + field.getName() + ">, expected <"
-                                    + tableType.getTypeName() + ">, got <"
-                                    + partitionType.getTypeName() + ">");
+                        ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, "Column <"
+                        + field.getName() + ">, expected <"
+                        + tableType.getTypeName() + ">, got <"
+                        + partitionType.getTypeName() + ">");
                 }
             }
         }
@@ -304,16 +304,16 @@ public class HCatUtil {
      */
     public static boolean validateMorePermissive(FsAction first, FsAction second) {
         if ((first == FsAction.ALL) || (second == FsAction.NONE)
-                || (first == second)) {
+            || (first == second)) {
             return true;
         }
         switch (first) {
-            case READ_EXECUTE:
-                return ((second == FsAction.READ) || (second == FsAction.EXECUTE));
-            case READ_WRITE:
-                return ((second == FsAction.READ) || (second == FsAction.WRITE));
-            case WRITE_EXECUTE:
-                return ((second == FsAction.WRITE) || (second == FsAction.EXECUTE));
+        case READ_EXECUTE:
+            return ((second == FsAction.READ) || (second == FsAction.EXECUTE));
+        case READ_WRITE:
+            return ((second == FsAction.READ) || (second == FsAction.WRITE));
+        case WRITE_EXECUTE:
+            return ((second == FsAction.WRITE) || (second == FsAction.EXECUTE));
         }
         return false;
     }
@@ -329,18 +329,18 @@ public class HCatUtil {
      */
     public static boolean validateExecuteBitPresentIfReadOrWrite(FsAction perms) {
         if ((perms == FsAction.READ) || (perms == FsAction.WRITE)
-                || (perms == FsAction.READ_WRITE)) {
+            || (perms == FsAction.READ_WRITE)) {
             return false;
         }
         return true;
     }
 
     public static Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> getJobTrackerDelegationToken(
-            Configuration conf, String userName) throws Exception {
+        Configuration conf, String userName) throws Exception {
         // LOG.info("getJobTrackerDelegationToken("+conf+","+userName+")");
         JobClient jcl = new JobClient(new JobConf(conf, HCatOutputFormat.class));
         Token<org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier> t = jcl
-                .getDelegationToken(new Text(userName));
+            .getDelegationToken(new Text(userName));
         // LOG.info("got "+t);
         return t;
 
@@ -348,8 +348,8 @@ public class HCatUtil {
     }
 
     public static Token<? extends AbstractDelegationTokenIdentifier> extractThriftToken(
-            String tokenStrForm, String tokenSignature) throws MetaException,
-            TException, IOException {
+        String tokenStrForm, String tokenSignature) throws MetaException,
+        TException, IOException {
         // LOG.info("extractThriftToken("+tokenStrForm+","+tokenSignature+")");
         Token<? extends AbstractDelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
         t.decodeFromUrlString(tokenStrForm);
@@ -369,19 +369,19 @@ public class HCatUtil {
      */
     public static HCatStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
         return getStorageHandler(conf,
-                                 storerInfo.getStorageHandlerClass(),
-                                 storerInfo.getSerdeClass(),
-                                 storerInfo.getIfClass(),
-                                 storerInfo.getOfClass());
+            storerInfo.getStorageHandlerClass(),
+            storerInfo.getSerdeClass(),
+            storerInfo.getIfClass(),
+            storerInfo.getOfClass());
     }
 
     public static HCatStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
-      return HCatUtil.getStorageHandler(
-          conf,
-          partitionInfo.getStorageHandlerClassName(),
-          partitionInfo.getSerdeClassName(),
-          partitionInfo.getInputFormatClassName(),
-          partitionInfo.getOutputFormatClassName());
+        return HCatUtil.getStorageHandler(
+            conf,
+            partitionInfo.getStorageHandlerClassName(),
+            partitionInfo.getSerdeClassName(),
+            partitionInfo.getInputFormatClassName(),
+            partitionInfo.getOutputFormatClassName());
     }
 
     /**
@@ -401,9 +401,9 @@ public class HCatUtil {
                                                        String serDe,
                                                        String inputFormat,
                                                        String outputFormat)
-    throws IOException {
+        throws IOException {
 
-        if ((storageHandler == null) || (storageHandler.equals(FosterStorageHandler.class.getName()))){
+        if ((storageHandler == null) || (storageHandler.equals(FosterStorageHandler.class.getName()))) {
             try {
                 FosterStorageHandler fosterStorageHandler =
                     new FosterStorageHandler(inputFormat, outputFormat, serDe);
@@ -411,57 +411,57 @@ public class HCatUtil {
                 return fosterStorageHandler;
             } catch (ClassNotFoundException e) {
                 throw new IOException("Failed to load "
-                    + "foster storage handler",e);
+                    + "foster storage handler", e);
             }
         }
 
         try {
             Class<? extends HCatStorageHandler> handlerClass =
-                        (Class<? extends HCatStorageHandler>) Class
+                (Class<? extends HCatStorageHandler>) Class
                     .forName(storageHandler, true, JavaUtils.getClassLoader());
-            return (HCatStorageHandler)ReflectionUtils.newInstance(
-                                                            handlerClass, conf);
+            return (HCatStorageHandler) ReflectionUtils.newInstance(
+                handlerClass, conf);
         } catch (ClassNotFoundException e) {
             throw new IOException("Error in loading storage handler."
-                    + e.getMessage(), e);
+                + e.getMessage(), e);
         }
     }
 
-    public static Pair<String,String> getDbAndTableName(String tableName) throws IOException{
-      String[] dbTableNametokens = tableName.split("\\.");
-      if(dbTableNametokens.length == 1) {
-        return new Pair<String,String>(MetaStoreUtils.DEFAULT_DATABASE_NAME,tableName);
-      }else if (dbTableNametokens.length == 2) {
-        return new Pair<String, String>(dbTableNametokens[0], dbTableNametokens[1]);
-      }else{
-        throw new IOException("tableName expected in the form "
-            +"<databasename>.<table name> or <table name>. Got " + tableName);
-      }
+    public static Pair<String, String> getDbAndTableName(String tableName) throws IOException {
+        String[] dbTableNametokens = tableName.split("\\.");
+        if (dbTableNametokens.length == 1) {
+            return new Pair<String, String>(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        } else if (dbTableNametokens.length == 2) {
+            return new Pair<String, String>(dbTableNametokens[0], dbTableNametokens[1]);
+        } else {
+            throw new IOException("tableName expected in the form "
+                + "<databasename>.<table name> or <table name>. Got " + tableName);
+        }
     }
 
     public static Map<String, String>
-      getInputJobProperties(HCatStorageHandler storageHandler,
-                            InputJobInfo inputJobInfo) {
+    getInputJobProperties(HCatStorageHandler storageHandler,
+                          InputJobInfo inputJobInfo) {
         TableDesc tableDesc = new TableDesc(storageHandler.getSerDeClass(),
-                  storageHandler.getInputFormatClass(),
-                  storageHandler.getOutputFormatClass(),
-                  inputJobInfo.getTableInfo().getStorerInfo().getProperties());
-        if(tableDesc.getJobProperties() == null) {
+            storageHandler.getInputFormatClass(),
+            storageHandler.getOutputFormatClass(),
+            inputJobInfo.getTableInfo().getStorerInfo().getProperties());
+        if (tableDesc.getJobProperties() == null) {
             tableDesc.setJobProperties(new HashMap<String, String>());
         }
 
-        Map<String,String> jobProperties = new HashMap<String,String>();
+        Map<String, String> jobProperties = new HashMap<String, String>();
         try {
             tableDesc.getJobProperties().put(
                 HCatConstants.HCAT_KEY_JOB_INFO,
                 HCatUtil.serialize(inputJobInfo));
 
             storageHandler.configureInputJobProperties(tableDesc,
-                                                                jobProperties);
+                jobProperties);
 
         } catch (IOException e) {
             throw new IllegalStateException(
-                "Failed to configure StorageHandler",e);
+                "Failed to configure StorageHandler", e);
         }
 
         return jobProperties;
@@ -469,36 +469,36 @@ public class HCatUtil {
 
 
     public static void
-      configureOutputStorageHandler(HCatStorageHandler storageHandler,
-                                    JobContext context,
-                                    OutputJobInfo outputJobInfo) {
+    configureOutputStorageHandler(HCatStorageHandler storageHandler,
+                                  JobContext context,
+                                  OutputJobInfo outputJobInfo) {
         //TODO replace IgnoreKeyTextOutputFormat with a
         //HiveOutputFormatWrapper in StorageHandler
         TableDesc tableDesc = new TableDesc(storageHandler.getSerDeClass(),
-                  storageHandler.getInputFormatClass(),
-                  IgnoreKeyTextOutputFormat.class,
-                  outputJobInfo.getTableInfo().getStorerInfo().getProperties());
-        if(tableDesc.getJobProperties() == null)
+            storageHandler.getInputFormatClass(),
+            IgnoreKeyTextOutputFormat.class,
+            outputJobInfo.getTableInfo().getStorerInfo().getProperties());
+        if (tableDesc.getJobProperties() == null)
             tableDesc.setJobProperties(new HashMap<String, String>());
-        for (Map.Entry<String,String> el: context.getConfiguration()) {
-           tableDesc.getJobProperties().put(el.getKey(),el.getValue());
+        for (Map.Entry<String, String> el : context.getConfiguration()) {
+            tableDesc.getJobProperties().put(el.getKey(), el.getValue());
         }
 
-        Map<String,String> jobProperties = new HashMap<String,String>();
+        Map<String, String> jobProperties = new HashMap<String, String>();
         try {
             tableDesc.getJobProperties().put(
                 HCatConstants.HCAT_KEY_OUTPUT_INFO,
                 HCatUtil.serialize(outputJobInfo));
 
             storageHandler.configureOutputJobProperties(tableDesc,
-                                                        jobProperties);
+                jobProperties);
 
-            for(Map.Entry<String,String> el: jobProperties.entrySet()) {
-                context.getConfiguration().set(el.getKey(),el.getValue());
+            for (Map.Entry<String, String> el : jobProperties.entrySet()) {
+                context.getConfiguration().set(el.getKey(), el.getValue());
             }
         } catch (IOException e) {
             throw new IllegalStateException(
-                "Failed to configure StorageHandler",e);
+                "Failed to configure StorageHandler", e);
         }
     }
 
@@ -509,8 +509,8 @@ public class HCatUtil {
      */
     public static void copyConf(Configuration src, Configuration dest) {
         dest.clear();
-        for(Map.Entry<String,String> el : src) {
-            dest.set(el.getKey(),el.getValue());
+        for (Map.Entry<String, String> el : src) {
+            dest.set(el.getKey(), el.getValue());
         }
     }
 
@@ -522,16 +522,16 @@ public class HCatUtil {
      * @throws IOException
      */
     public static HiveMetaStoreClient getHiveClient(HiveConf hiveConf)
-            throws MetaException, IOException {
+        throws MetaException, IOException {
 
         // Singleton behaviour: create the cache instance if required. The cache needs to be created lazily,
         // using the expiry time available in hiveConf.
 
-        if(hiveClientCache == null ) {
+        if (hiveClientCache == null) {
             synchronized (HiveMetaStoreClient.class) {
-                if(hiveClientCache == null) {
+                if (hiveClientCache == null) {
                     hiveClientCache = new HiveClientCache(hiveConf.getInt(HCatConstants.HCAT_HIVE_CLIENT_EXPIRY_TIME,
-                            DEFAULT_HIVE_CACHE_EXPIRY_TIME_SECONDS));
+                        DEFAULT_HIVE_CACHE_EXPIRY_TIME_SECONDS));
                 }
             }
         }
@@ -552,65 +552,63 @@ public class HCatUtil {
     }
 
     public static HiveConf getHiveConf(Configuration conf)
-      throws IOException {
+        throws IOException {
+
+        HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);
+
+        //copy the hive conf into the job conf and restore it
+        //in the backend context
+        if (conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null) {
+            conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
+                HCatUtil.serialize(hiveConf.getAllProperties()));
+        } else {
+            //Copy configuration properties into the hive conf
+            Properties properties = (Properties) HCatUtil.deserialize(
+                conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
+
+            for (Map.Entry<Object, Object> prop : properties.entrySet()) {
+                if (prop.getValue() instanceof String) {
+                    hiveConf.set((String) prop.getKey(), (String) prop.getValue());
+                } else if (prop.getValue() instanceof Integer) {
+                    hiveConf.setInt((String) prop.getKey(),
+                        (Integer) prop.getValue());
+                } else if (prop.getValue() instanceof Boolean) {
+                    hiveConf.setBoolean((String) prop.getKey(),
+                        (Boolean) prop.getValue());
+                } else if (prop.getValue() instanceof Long) {
+                    hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
+                } else if (prop.getValue() instanceof Float) {
+                    hiveConf.setFloat((String) prop.getKey(),
+                        (Float) prop.getValue());
+                }
+            }
+        }
 
-      HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);
+        if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
+            hiveConf.set("hive.metastore.token.signature",
+                conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
+        }
 
-      //copy the hive conf into the job conf and restore it
-      //in the backend context
-      if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
-        conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-            HCatUtil.serialize(hiveConf.getAllProperties()));
-      } else {
-        //Copy configuration properties into the hive conf
-        Properties properties = (Properties) HCatUtil.deserialize(
-            conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
-
-        for(Map.Entry<Object, Object> prop : properties.entrySet() ) {
-          if( prop.getValue() instanceof String ) {
-            hiveConf.set((String) prop.getKey(), (String) prop.getValue());
-          } else if( prop.getValue() instanceof Integer ) {
-            hiveConf.setInt((String) prop.getKey(),
-                (Integer) prop.getValue());
-          } else if( prop.getValue() instanceof Boolean ) {
-            hiveConf.setBoolean((String) prop.getKey(),
-                (Boolean) prop.getValue());
-          } else if( prop.getValue() instanceof Long ) {
-            hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
-          } else if( prop.getValue() instanceof Float ) {
-            hiveConf.setFloat((String) prop.getKey(),
-                (Float) prop.getValue());
-          }
-        }
-      }
-
-      if(conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
-        hiveConf.set("hive.metastore.token.signature",
-                     conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
-      }
-
-      return hiveConf;
+        return hiveConf;
     }
 
 
-    public static JobConf getJobConfFromContext(JobContext jobContext)
-    {
-      JobConf jobConf;
-      // we need to convert the jobContext into a jobConf
-      // 0.18 jobConf (Hive) vs 0.20+ jobContext (HCat)
-      // begin conversion..
-      jobConf = new JobConf(jobContext.getConfiguration());
-      // ..end of conversion
+    public static JobConf getJobConfFromContext(JobContext jobContext) {
+        JobConf jobConf;
+        // we need to convert the jobContext into a jobConf
+        // 0.18 jobConf (Hive) vs 0.20+ jobContext (HCat)
+        // begin conversion..
+        jobConf = new JobConf(jobContext.getConfiguration());
+        // ..end of conversion
 
 
-      return jobConf;
+        return jobConf;
     }
 
     public static void copyJobPropertiesToJobConf(
-                    Map<String, String>jobProperties, JobConf jobConf)
-    {
-      for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
-        jobConf.set(entry.getKey(), entry.getValue());
-      }
+        Map<String, String> jobProperties, JobConf jobConf) {
+        for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
+            jobConf.set(entry.getKey(), entry.getValue());
+        }
     }
 }
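
A quick sketch of the getDbAndTableName() contract cleaned up above (illustrative only; Pair here is whatever Pair type HCatUtil itself declares in its return signature):

    import org.apache.hcatalog.common.HCatUtil;

    // "sales.orders" -> (db "sales", table "orders")
    // "orders"       -> (MetaStoreUtils.DEFAULT_DATABASE_NAME, "orders")
    // "a.b.c"        -> IOException: tableName expected in the form <databasename>.<table name> or <table name>
    Pair<String, String> dbTable = HCatUtil.getDbAndTableName("sales.orders");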

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HiveClientCache.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HiveClientCache.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HiveClientCache.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HiveClientCache.java Mon Sep 10 23:28:55 2012
@@ -57,11 +57,12 @@ class HiveClientCache {
     // Thread-local variable containing each thread's unique ID; it is used as one of the keys for the cache,
     // causing each thread to get a different client even if the hiveConf is the same.
     private static final ThreadLocal<Integer> threadId =
-            new ThreadLocal<Integer>() {
-                @Override protected Integer initialValue() {
-                    return nextId.getAndIncrement();
-                }
-            };
+        new ThreadLocal<Integer>() {
+            @Override
+            protected Integer initialValue() {
+                return nextId.getAndIncrement();
+            }
+        };
 
     private int getThreadId() {
         return threadId.get();
@@ -70,39 +71,39 @@ class HiveClientCache {
     /**
      * @param timeout the length of time in seconds after a client is created that it should be automatically removed
      */
-     public HiveClientCache(final int timeout) {
-         this.timeout = timeout;
-         RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient> removalListener =
-             new RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient>() {
-                 public void onRemoval(RemovalNotification<HiveClientCacheKey, CacheableHiveMetaStoreClient> notification) {
-                     CacheableHiveMetaStoreClient hiveMetaStoreClient = notification.getValue();
-                     if (hiveMetaStoreClient != null) {
-                         synchronized (CACHE_TEARDOWN_LOCK) {
+    public HiveClientCache(final int timeout) {
+        this.timeout = timeout;
+        RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient> removalListener =
+            new RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient>() {
+                public void onRemoval(RemovalNotification<HiveClientCacheKey, CacheableHiveMetaStoreClient> notification) {
+                    CacheableHiveMetaStoreClient hiveMetaStoreClient = notification.getValue();
+                    if (hiveMetaStoreClient != null) {
+                        synchronized (CACHE_TEARDOWN_LOCK) {
                             hiveMetaStoreClient.setExpiredFromCache();
                             hiveMetaStoreClient.tearDownIfUnused();
-                         }
-                     }
-                 }
-             };
-         hiveCache = CacheBuilder.newBuilder()
-                 .expireAfterWrite(timeout, TimeUnit.SECONDS)
-                 .removalListener(removalListener)
-                 .build();
-
-         // Add a shutdown hook for cleanup, if there are elements remaining in the cache which were not cleaned up.
-         // This is the best effort approach. Ignore any error while doing so. Notice that most of the clients
-         // would get cleaned up via either the removalListener or the close() call, only the active clients
-         // that are in the cache or expired but being used in other threads wont get cleaned. The following code will only
-         // clean the active cache ones. The ones expired from cache but being hold by other threads are in the mercy
-         // of finalize() being called.
-         Thread cleanupHiveClientShutdownThread = new Thread() {
-             @Override
-             public void run() {
-                 LOG.info("Cleaning up hive client cache in ShutDown hook");
-                 closeAllClientsQuietly();
-             }
-         };
-         Runtime.getRuntime().addShutdownHook(cleanupHiveClientShutdownThread);
+                        }
+                    }
+                }
+            };
+        hiveCache = CacheBuilder.newBuilder()
+            .expireAfterWrite(timeout, TimeUnit.SECONDS)
+            .removalListener(removalListener)
+            .build();
+
+        // Add a shutdown hook for cleanup, if there are elements remaining in the cache which were not cleaned up.
+        // This is a best-effort approach; ignore any errors while doing so. Note that most of the clients
+        // would get cleaned up via either the removalListener or the close() call; only the active clients
+        // that are in the cache, or expired but still in use by other threads, won't get cleaned. The following code will only
+        // clean the actively cached ones. The ones expired from the cache but still held by other threads are at the mercy
+        // of finalize() being called.
+        Thread cleanupHiveClientShutdownThread = new Thread() {
+            @Override
+            public void run() {
+                LOG.info("Cleaning up hive client cache in ShutDown hook");
+                closeAllClientsQuietly();
+            }
+        };
+        Runtime.getRuntime().addShutdownHook(cleanupHiveClientShutdownThread);
     }
 
     /**
@@ -215,36 +216,36 @@ class HiveClientCache {
             if (o == null || getClass() != o.getClass()) return false;
             HiveClientCacheKey that = (HiveClientCacheKey) o;
             return new EqualsBuilder().
-                    append(this.metaStoreURIs,
-                            that.metaStoreURIs).
-                    append(this.ugi, that.ugi).
-                    append(this.threadId, that.threadId).isEquals();
+                append(this.metaStoreURIs,
+                    that.metaStoreURIs).
+                append(this.ugi, that.ugi).
+                append(this.threadId, that.threadId).isEquals();
         }
 
         @Override
         public int hashCode() {
             return new HashCodeBuilder().
-                    append(metaStoreURIs).
-                    append(ugi).
-                    append(threadId).toHashCode();
+                append(metaStoreURIs).
+                append(ugi).
+                append(threadId).toHashCode();
         }
     }
 
     /**
      * Add # of current users on HiveMetaStoreClient, so that the client can be cleaned when no one is using it.
      */
-    public static class CacheableHiveMetaStoreClient extends HiveMetaStoreClient  {
-        private  AtomicInteger users = new AtomicInteger(0);
+    public static class CacheableHiveMetaStoreClient extends HiveMetaStoreClient {
+        private AtomicInteger users = new AtomicInteger(0);
         private volatile boolean expiredFromCache = false;
         private boolean isClosed = false;
         private final long expiryTime;
-        private static final int EXPIRY_TIME_EXTENSION_IN_MILLIS = 60*1000;
+        private static final int EXPIRY_TIME_EXTENSION_IN_MILLIS = 60 * 1000;
 
         public CacheableHiveMetaStoreClient(final HiveConf conf, final int timeout) throws MetaException {
             super(conf);
             // Extend the expiry time with some extra time on top of the guava expiry time, to make sure
             // that items being closed have definitely expired and will never be returned by guava.
-            this.expiryTime = System.currentTimeMillis() + timeout*1000 + EXPIRY_TIME_EXTENSION_IN_MILLIS;
+            this.expiryTime = System.currentTimeMillis() + timeout * 1000 + EXPIRY_TIME_EXTENSION_IN_MILLIS;
         }
 
         private void acquire() {
@@ -287,9 +288,9 @@ class HiveClientCache {
          * This *MUST* be called by anyone who uses this client.
          */
         @Override
-        public void close(){
+        public void close() {
             release();
-            if(System.currentTimeMillis() >= expiryTime)
+            if (System.currentTimeMillis() >= expiryTime)
                 setExpiredFromCache();
             tearDownIfUnused();
         }
@@ -300,7 +301,7 @@ class HiveClientCache {
          *  2. It has expired from the cache
          */
         private void tearDownIfUnused() {
-            if(users.get() == 0 && expiredFromCache) {
+            if (users.get() == 0 && expiredFromCache) {
                 this.tearDown();
             }
         }
@@ -310,11 +311,11 @@ class HiveClientCache {
          */
         protected synchronized void tearDown() {
             try {
-                if(!isClosed) {
+                if (!isClosed) {
                     super.close();
                 }
                 isClosed = true;
-            } catch(Exception e) {
+            } catch (Exception e) {
                 LOG.warn("Error closing hive metastore client. Ignored.", e);
             }
         }

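For orientation, here is a minimal, self-contained sketch of the reference-counting
pattern that CacheableHiveMetaStoreClient implements above: a client is torn down
only once the cache has let go of it AND no thread is still using it. The class name
SketchedClient and the method bodies are illustrative assumptions, not HCatalog code.

    import java.util.concurrent.atomic.AtomicInteger;

    // Hypothetical stand-in for CacheableHiveMetaStoreClient; not HCatalog code.
    class SketchedClient {
        private final AtomicInteger users = new AtomicInteger(0);
        private volatile boolean expiredFromCache = false;
        private boolean isClosed = false;

        void acquire() {               // the cache calls this when handing the client out
            users.incrementAndGet();
        }

        void setExpiredFromCache() {   // the cache's removal listener calls this on eviction
            expiredFromCache = true;
            tearDownIfUnused();
        }

        void close() {                 // every user must call this when done
            users.decrementAndGet();
            tearDownIfUnused();
        }

        // Tear down only when nobody holds the client and the cache has evicted it.
        private synchronized void tearDownIfUnused() {
            if (users.get() == 0 && expiredFromCache && !isClosed) {
                isClosed = true;       // the real class calls super.close() here
            }
        }
    }

The extra expiry-time padding in the real class serves the same goal: a client that
has been closed must be guaranteed to have expired, so guava can never hand it out again.
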
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DataType.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DataType.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DataType.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DataType.java Mon Sep 10 23:28:55 2012
@@ -27,179 +27,180 @@ import java.util.Map.Entry;
 
 public abstract class DataType {
 
-  public static final byte NULL      =   1;
-  public static final byte BOOLEAN   =   5;
-  public static final byte BYTE      =   6;
-  public static final byte INTEGER   =  10;
-  public static final byte SHORT     =  11;
-  public static final byte LONG      =  15;
-  public static final byte FLOAT     =  20;
-  public static final byte DOUBLE    =  25;
-  public static final byte STRING    =  55;
-  public static final byte BINARY    =  60;
-
-  public static final byte MAP       = 100;
-  public static final byte STRUCT    = 110;
-  public static final byte LIST      = 120;
-  public static final byte ERROR     =  -1;
-
-  /**
-   * Determine the datatype of an object.
-   * @param o Object to test.
-   * @return byte code of the type, or ERROR if we don't know.
-   */
-  public static byte findType(Object o) {
-    if (o == null) {
-      return NULL;
+    public static final byte NULL = 1;
+    public static final byte BOOLEAN = 5;
+    public static final byte BYTE = 6;
+    public static final byte INTEGER = 10;
+    public static final byte SHORT = 11;
+    public static final byte LONG = 15;
+    public static final byte FLOAT = 20;
+    public static final byte DOUBLE = 25;
+    public static final byte STRING = 55;
+    public static final byte BINARY = 60;
+
+    public static final byte MAP = 100;
+    public static final byte STRUCT = 110;
+    public static final byte LIST = 120;
+    public static final byte ERROR = -1;
+
+    /**
+     * Determine the datatype of an object.
+     * @param o Object to test.
+     * @return byte code of the type, or ERROR if we don't know.
+     */
+    public static byte findType(Object o) {
+        if (o == null) {
+            return NULL;
+        }
+
+        Class<?> clazz = o.getClass();
+
+        // Try to put the most common first
+        if (clazz == String.class) {
+            return STRING;
+        } else if (clazz == Integer.class) {
+            return INTEGER;
+        } else if (clazz == Long.class) {
+            return LONG;
+        } else if (clazz == Float.class) {
+            return FLOAT;
+        } else if (clazz == Double.class) {
+            return DOUBLE;
+        } else if (clazz == Boolean.class) {
+            return BOOLEAN;
+        } else if (clazz == Byte.class) {
+            return BYTE;
+        } else if (clazz == Short.class) {
+            return SHORT;
+        } else if (o instanceof List<?>) {
+            return LIST;
+        } else if (o instanceof Map<?, ?>) {
+            return MAP;
+        } else if (o instanceof byte[]) {
+            return BINARY;
+        } else {
+            return ERROR;
+        }
     }
 
-    Class<?> clazz = o.getClass();
+    public static int compare(Object o1, Object o2) {
 
-    // Try to put the most common first
-    if (clazz == String.class) {
-      return STRING;
-    } else if (clazz == Integer.class) {
-      return INTEGER;
-    } else if (clazz == Long.class) {
-      return LONG;
-    } else if (clazz == Float.class) {
-      return FLOAT;
-    } else if (clazz == Double.class) {
-      return DOUBLE;
-    } else if (clazz == Boolean.class) {
-      return BOOLEAN;
-    } else if (clazz == Byte.class) {
-      return BYTE;
-    } else if (clazz == Short.class) {
-      return SHORT;
-    } else if (o instanceof List<?>) {
-      return LIST;
-    } else if (o instanceof Map<?,?>) {
-      return MAP;
-    }else if (o instanceof byte[]) {
-        return BINARY;
-    } else {return ERROR;}
-  }
-
-  public static int compare(Object o1, Object o2) {
-
-    return compare(o1, o2, findType(o1),findType(o2));
-  }
-
-  public static int compare(Object o1, Object o2, byte dt1, byte dt2) {
-    if (dt1 == dt2) {
-      switch (dt1) {
-      case NULL:
-        return 0;
-
-      case BOOLEAN:
-        return ((Boolean)o1).compareTo((Boolean)o2);
-
-      case BYTE:
-        return ((Byte)o1).compareTo((Byte)o2);
-
-      case INTEGER:
-        return ((Integer)o1).compareTo((Integer)o2);
-
-      case LONG:
-        return ((Long)o1).compareTo((Long)o2);
-
-      case FLOAT:
-        return ((Float)o1).compareTo((Float)o2);
-
-      case DOUBLE:
-        return ((Double)o1).compareTo((Double)o2);
-
-      case STRING:
-        return ((String)o1).compareTo((String)o2);
-
-      case SHORT:
-        return ((Short)o1).compareTo((Short)o2);
-        
-      case BINARY:
-        return compareByteArray((byte[])o1, (byte[])o2);
-
-      case LIST:
-        List<?> l1 = (List<?>)o1;
-        List<?> l2 = (List<?>)o2;
-        int len = l1.size();
-        if(len != l2.size()) {
-          return len - l2.size();
-        } else{
-          for(int i =0; i < len; i++){
-            int cmpVal = compare(l1.get(i), l2.get(i));
-            if(cmpVal != 0) {
-              return cmpVal;
+        return compare(o1, o2, findType(o1), findType(o2));
+    }
+
+    public static int compare(Object o1, Object o2, byte dt1, byte dt2) {
+        if (dt1 == dt2) {
+            switch (dt1) {
+            case NULL:
+                return 0;
+
+            case BOOLEAN:
+                return ((Boolean) o1).compareTo((Boolean) o2);
+
+            case BYTE:
+                return ((Byte) o1).compareTo((Byte) o2);
+
+            case INTEGER:
+                return ((Integer) o1).compareTo((Integer) o2);
+
+            case LONG:
+                return ((Long) o1).compareTo((Long) o2);
+
+            case FLOAT:
+                return ((Float) o1).compareTo((Float) o2);
+
+            case DOUBLE:
+                return ((Double) o1).compareTo((Double) o2);
+
+            case STRING:
+                return ((String) o1).compareTo((String) o2);
+
+            case SHORT:
+                return ((Short) o1).compareTo((Short) o2);
+
+            case BINARY:
+                return compareByteArray((byte[]) o1, (byte[]) o2);
+
+            case LIST:
+                List<?> l1 = (List<?>) o1;
+                List<?> l2 = (List<?>) o2;
+                int len = l1.size();
+                if (len != l2.size()) {
+                    return len - l2.size();
+                } else {
+                    for (int i = 0; i < len; i++) {
+                        int cmpVal = compare(l1.get(i), l2.get(i));
+                        if (cmpVal != 0) {
+                            return cmpVal;
+                        }
+                    }
+                    return 0;
+                }
+
+            case MAP: {
+                Map<?, ?> m1 = (Map<?, ?>) o1;
+                Map<?, ?> m2 = (Map<?, ?>) o2;
+                int sz1 = m1.size();
+                int sz2 = m2.size();
+                if (sz1 < sz2) {
+                    return -1;
+                } else if (sz1 > sz2) {
+                    return 1;
+                } else {
+                    // This is bad, but we have to sort the keys of the maps in order
+                    // to be commutative.
+                    TreeMap<Object, Object> tm1 = new TreeMap<Object, Object>(m1);
+                    TreeMap<Object, Object> tm2 = new TreeMap<Object, Object>(m2);
+                    Iterator<Entry<Object, Object>> i1 = tm1.entrySet().iterator();
+                    Iterator<Entry<Object, Object>> i2 = tm2.entrySet().iterator();
+                    while (i1.hasNext()) {
+                        Map.Entry<Object, Object> entry1 = i1.next();
+                        Map.Entry<Object, Object> entry2 = i2.next();
+                        int c = compare(entry1.getKey(), entry2.getKey());
+                        if (c != 0) {
+                            return c;
+                        } else {
+                            c = compare(entry1.getValue(), entry2.getValue());
+                            if (c != 0) {
+                                return c;
+                            }
+                        }
+                    }
+                    return 0;
+                }
             }
-          }
-          return 0;
-        }
 
-      case MAP: {
-        Map<?,?> m1 = (Map<?,?>)o1;
-        Map<?,?> m2 = (Map<?,?>)o2;
-        int sz1 = m1.size();
-        int sz2 = m2.size();
-        if (sz1 < sz2) {
-          return -1;
-        } else if (sz1 > sz2) {
-          return 1;
-        } else {
-          // This is bad, but we have to sort the keys of the maps in order
-          // to be commutative.
-          TreeMap<Object,Object> tm1 = new TreeMap<Object,Object>(m1);
-          TreeMap<Object, Object> tm2 = new TreeMap<Object,Object>(m2);
-          Iterator<Entry<Object, Object>> i1 = tm1.entrySet().iterator();
-          Iterator<Entry<Object, Object> > i2 = tm2.entrySet().iterator();
-          while (i1.hasNext()) {
-            Map.Entry<Object, Object> entry1 = i1.next();
-            Map.Entry<Object, Object> entry2 = i2.next();
-            int c = compare(entry1.getValue(), entry2.getValue());
-            if (c != 0) {
-              return c;
-            } else {
-              c = compare(entry1.getValue(), entry2.getValue());
-              if (c != 0) {
-                return c;
-              }
+            default:
+                throw new RuntimeException("Unknown type " + dt1 +
+                    " in compare");
             }
-          }
-          return 0;
+        } else {
+            return dt1 < dt2 ? -1 : 1;
         }
-      }
-
-      default:
-        throw new RuntimeException("Unkown type " + dt1 +
-        " in compare");
-      }
-    } else {
-      return dt1 < dt2 ? -1 : 1;
     }
-  }
 
-  private static int compareByteArray(byte[] o1, byte[] o2) {
-    
-    for(int i = 0; i < o1.length; i++){
-      if(i == o2.length){
-        return 1;
-      }
-      if(o1[i] == o2[i]){
-        continue;
-      }
-      if(o1[i] > o1[i]){
-        return 1;
-      }
-      else{
-        return -1;
-      }
-    }
+    private static int compareByteArray(byte[] o1, byte[] o2) {
+
+        for (int i = 0; i < o1.length; i++) {
+            if (i == o2.length) {
+                return 1;
+            }
+            if (o1[i] == o2[i]) {
+                continue;
+            }
+            if (o1[i] > o2[i]) {
+                return 1;
+            } else {
+                return -1;
+            }
+        }
 
-    //bytes in o1 are same as o2
-    //in case o2 was longer
-    if(o2.length > o1.length){
-      return -1;
+        //bytes in o1 are same as o2
+        //in case o2 was longer
+        if (o2.length > o1.length) {
+            return -1;
+        }
+        return 0; //equals
     }
-    return 0; //equals
-  }
 
 }

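The two static entry points touched above, DataType.findType and DataType.compare,
can be exercised directly. A short usage sketch follows; the sample values are
arbitrary and DataTypeExample is a hypothetical driver class:

    import java.util.Arrays;
    import org.apache.hcatalog.data.DataType;

    public class DataTypeExample {
        public static void main(String[] args) {
            // findType maps a Java object to its HCatalog type code.
            byte t = DataType.findType("hello");             // DataType.STRING (55)
            System.out.println("type code: " + t);

            // Objects of the same type compare by value...
            System.out.println(DataType.compare(1, 2));      // negative
            // ...objects of different types compare by type code,
            // so INTEGER (10) sorts before STRING (55).
            System.out.println(DataType.compare(1, "a"));    // -1
            // Lists compare by length first, then element-wise.
            System.out.println(DataType.compare(
                Arrays.asList(1, 2), Arrays.asList(1, 3)));  // negative
        }
    }
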
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java Mon Sep 10 23:28:55 2012
@@ -31,20 +31,20 @@ public class DefaultHCatRecord extends H
 
     private List<Object> contents;
 
-    public DefaultHCatRecord(){
+    public DefaultHCatRecord() {
         contents = new ArrayList<Object>();
     }
 
-    public DefaultHCatRecord(int size){
+    public DefaultHCatRecord(int size) {
         contents = new ArrayList<Object>(size);
-        for(int i=0; i < size; i++){
+        for (int i = 0; i < size; i++) {
             contents.add(null);
         }
     }
 
     @Override
     public void remove(int idx) throws HCatException {
-      contents.remove(idx);
+        contents.remove(idx);
     }
 
     public DefaultHCatRecord(List<Object> list) {
@@ -76,7 +76,7 @@ public class DefaultHCatRecord extends H
 
         contents.clear();
         int len = in.readInt();
-        for(int i =0; i < len; i++){
+        for (int i = 0; i < len; i++) {
             contents.add(ReaderWriter.readDatum(in));
         }
     }
@@ -106,8 +106,8 @@ public class DefaultHCatRecord extends H
     public String toString() {
 
         StringBuilder sb = new StringBuilder();
-        for(Object o : contents) {
-            sb.append(o+"\t");
+        for (Object o : contents) {
+            sb.append(o + "\t");
         }
         return sb.toString();
     }
@@ -119,12 +119,12 @@ public class DefaultHCatRecord extends H
 
     @Override
     public void set(String fieldName, HCatSchema recordSchema, Object value) throws HCatException {
-        set(recordSchema.getPosition(fieldName),value);
+        set(recordSchema.getPosition(fieldName), value);
     }
 
     @Override
     public void copy(HCatRecord r) throws HCatException {
-      this.contents = r.getAll();
+        this.contents = r.getAll();
     }
 
 }

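As the diff shows, DefaultHCatRecord is essentially a positional wrapper around an
ArrayList, with the schema-aware set(String, HCatSchema, Object) resolving a field
name to a position. A small positional sketch follows; set(int, Object) and get(int)
come from the HCatRecordable interface, and the field values are made up:

    import org.apache.hcatalog.data.DefaultHCatRecord;
    import org.apache.hcatalog.data.HCatRecord;

    public class RecordExample {
        public static void main(String[] args) throws Exception {
            HCatRecord r = new DefaultHCatRecord(2); // pre-sized; both slots start null
            r.set(0, "alice");
            r.set(1, 30);
            System.out.println(r);        // tab-separated, per toString() above
            System.out.println(r.get(1)); // 30
        }
    }
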
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecord.java Mon Sep 10 23:28:55 2012
@@ -33,13 +33,16 @@ import org.apache.hcatalog.data.schema.H
 public abstract class HCatRecord implements HCatRecordable {
 
     public abstract Object get(String fieldName, HCatSchema recordSchema) throws HCatException;
-    public abstract void set(String fieldName, HCatSchema recordSchema, Object value ) throws HCatException;
+
+    public abstract void set(String fieldName, HCatSchema recordSchema, Object value) throws HCatException;
+
     public abstract void remove(int idx) throws HCatException;
+
     public abstract void copy(HCatRecord r) throws HCatException;
 
-    protected Object get(String fieldName, HCatSchema recordSchema, Class clazz) throws HCatException{
+    protected Object get(String fieldName, HCatSchema recordSchema, Class clazz) throws HCatException {
         // TODO : if needed, verify that recordschema entry for fieldname matches appropriate type.
-        return get(fieldName,recordSchema);
+        return get(fieldName, recordSchema);
     }
 
     public Boolean getBoolean(String fieldName, HCatSchema recordSchema) throws HCatException {
@@ -47,15 +50,15 @@ public abstract class HCatRecord impleme
     }
 
     public void setBoolean(String fieldName, HCatSchema recordSchema, Boolean value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
-    
+
     public byte[] getByteArray(String fieldName, HCatSchema recordSchema) throws HCatException {
         return (byte[]) get(fieldName, recordSchema, byte[].class);
     }
 
     public void setByteArray(String fieldName, HCatSchema recordSchema, byte[] value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Byte getByte(String fieldName, HCatSchema recordSchema) throws HCatException {
@@ -64,7 +67,7 @@ public abstract class HCatRecord impleme
     }
 
     public void setByte(String fieldName, HCatSchema recordSchema, Byte value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Short getShort(String fieldName, HCatSchema recordSchema) throws HCatException {
@@ -73,73 +76,73 @@ public abstract class HCatRecord impleme
     }
 
     public void setShort(String fieldName, HCatSchema recordSchema, Short value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Integer getInteger(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (Integer) get(fieldName,recordSchema, Integer.class);
+        return (Integer) get(fieldName, recordSchema, Integer.class);
     }
 
     public void setInteger(String fieldName, HCatSchema recordSchema, Integer value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Long getLong(String fieldName, HCatSchema recordSchema) throws HCatException {
         // BIGINT
-        return (Long) get(fieldName,recordSchema,Long.class);
+        return (Long) get(fieldName, recordSchema, Long.class);
     }
 
     public void setLong(String fieldName, HCatSchema recordSchema, Long value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Float getFloat(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (Float) get(fieldName,recordSchema,Float.class);
+        return (Float) get(fieldName, recordSchema, Float.class);
     }
 
     public void setFloat(String fieldName, HCatSchema recordSchema, Float value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public Double getDouble(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (Double) get(fieldName,recordSchema,Double.class);
+        return (Double) get(fieldName, recordSchema, Double.class);
     }
 
     public void setDouble(String fieldName, HCatSchema recordSchema, Double value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public String getString(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (String) get(fieldName,recordSchema,String.class);
+        return (String) get(fieldName, recordSchema, String.class);
     }
 
     public void setString(String fieldName, HCatSchema recordSchema, String value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     @SuppressWarnings("unchecked")
     public List<? extends Object> getStruct(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (List<? extends Object>) get(fieldName,recordSchema,List.class);
+        return (List<? extends Object>) get(fieldName, recordSchema, List.class);
     }
 
     public void setStruct(String fieldName, HCatSchema recordSchema, List<? extends Object> value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
     public List<?> getList(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (List<?>) get(fieldName,recordSchema,List.class);
+        return (List<?>) get(fieldName, recordSchema, List.class);
     }
 
     public void setList(String fieldName, HCatSchema recordSchema, List<?> value) throws HCatException {
-        set(fieldName,recordSchema,value);
+        set(fieldName, recordSchema, value);
     }
 
-    public Map<?,?> getMap(String fieldName, HCatSchema recordSchema) throws HCatException {
-        return (Map<?,?>) get(fieldName,recordSchema,Map.class);
+    public Map<?, ?> getMap(String fieldName, HCatSchema recordSchema) throws HCatException {
+        return (Map<?, ?>) get(fieldName, recordSchema, Map.class);
     }
 
-    public void setMap(String fieldName, HCatSchema recordSchema, Map<?,?> value) throws HCatException {
-        set(fieldName,recordSchema,value);
+    public void setMap(String fieldName, HCatSchema recordSchema, Map<?, ?> value) throws HCatException {
+        set(fieldName, recordSchema, value);
     }
 
 }

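All of the typed accessors above funnel into the same two abstract methods: the
getters go through get(fieldName, recordSchema, ExpectedClass) and cast the result,
and the setters go through set(fieldName, recordSchema, value). A sketch of how
calling code uses them follows; constructing an HCatSchema is out of scope here, so
the helper assumes one is supplied by the caller, and the field names "name" and
"age" are illustrative assumptions:

    import org.apache.hcatalog.common.HCatException;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.data.schema.HCatSchema;

    public class TypedAccessExample {
        static String describe(HCatRecord r, HCatSchema schema) throws HCatException {
            // Typed getters resolve the field by name against the schema and cast.
            String name = r.getString("name", schema);
            Integer age = r.getInteger("age", schema);
            return name + " is " + age;
        }

        static void birthday(HCatRecord r, HCatSchema schema) throws HCatException {
            // Typed setters delegate to set(fieldName, schema, value).
            r.setInteger("age", schema, r.getInteger("age", schema) + 1);
        }
    }
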
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java Mon Sep 10 23:28:55 2012
@@ -25,28 +25,28 @@ import org.apache.hadoop.hive.serde2.obj
 
 public class HCatRecordObjectInspector extends StandardStructObjectInspector {
 
-  protected HCatRecordObjectInspector(List<String> structFieldNames,
-      List<ObjectInspector> structFieldObjectInspectors) {
-    super(structFieldNames, structFieldObjectInspectors);
-  }
-
-  @Override
-  public Object getStructFieldData(Object data, StructField fieldRef) {
-    if (data == null){
-      return new IllegalArgumentException("Data passed in to get field from was null!");
+    protected HCatRecordObjectInspector(List<String> structFieldNames,
+                                        List<ObjectInspector> structFieldObjectInspectors) {
+        super(structFieldNames, structFieldObjectInspectors);
     }
 
-    int fieldID = ((MyField) fieldRef).getFieldID();
-    if (!(fieldID >= 0 && fieldID < fields.size())){
-      throw new IllegalArgumentException("Invalid field index ["+fieldID+"]");
-    }
+    @Override
+    public Object getStructFieldData(Object data, StructField fieldRef) {
+        if (data == null) {
+            throw new IllegalArgumentException("Data passed in to get field from was null!");
+        }
+
+        int fieldID = ((MyField) fieldRef).getFieldID();
+        if (!(fieldID >= 0 && fieldID < fields.size())) {
+            throw new IllegalArgumentException("Invalid field index [" + fieldID + "]");
+        }
 
-    return ((HCatRecord) data).get(fieldID);
-  }
+        return ((HCatRecord) data).get(fieldID);
+    }
 
-  @Override
-  public List<Object> getStructFieldsDataAsList(Object o) {
-    return ((HCatRecord) o).getAll();
-  }
+    @Override
+    public List<Object> getStructFieldsDataAsList(Object o) {
+        return ((HCatRecord) o).getAll();
+    }
 
 }
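
Finally, a sketch of how the inspector above is consumed. getStructFieldRef(String)
is inherited from Hive's StructObjectInspector; the field name "name" and the
InspectorExample class are assumptions for illustration:

    import java.util.List;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.data.HCatRecordObjectInspector;

    public class InspectorExample {
        static Object readField(HCatRecordObjectInspector oi, HCatRecord record) {
            // Resolve the field handle once, then read the datum by position,
            // which is exactly what getStructFieldData does above.
            StructField field = oi.getStructFieldRef("name");
            return oi.getStructFieldData(record, field);
        }

        static List<Object> readAll(HCatRecordObjectInspector oi, HCatRecord record) {
            // Per the diff, this is simply record.getAll().
            return oi.getStructFieldsDataAsList(record);
        }
    }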