Posted to commits@hawq.apache.org by nh...@apache.org on 2015/10/08 20:37:26 UTC

[3/5] incubator-hawq git commit: HAWQ-28. JavaDoc fixes for PXF

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveFilterBuilder.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveFilterBuilder.java
index abdc82c..1fe3a64 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveFilterBuilder.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveFilterBuilder.java
@@ -7,60 +7,75 @@ import java.util.LinkedList;
 import java.util.List;
 
 /**
- * Uses the filter parser code to build a filter object,
- * either simple - a single {@link com.pivotal.pxf.api.FilterParser.BasicFilter} 
- * object or a compound - a {@link java.util.List} of
+ * Uses the filter parser code to build a filter object, either simple - a
+ * single {@link com.pivotal.pxf.api.FilterParser.BasicFilter} object or a
+ * compound - a {@link java.util.List} of
  * {@link com.pivotal.pxf.api.FilterParser.BasicFilter} objects.
- * {@link com.pivotal.pxf.plugins.hive.HiveAccessor} will use the filter 
- *        for partition filtering
+ * {@link com.pivotal.pxf.plugins.hive.HiveAccessor} will use the filter for
+ * partition filtering.
  */
 public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     private InputData inputData;
 
-	/**
-	 * Constructs a HiveFilterBuilder object
-	 * @param input
-	 */
+    /**
+     * Constructs a HiveFilterBuilder object.
+     *
+     * @param input input data containing filter string
+     */
     public HiveFilterBuilder(InputData input) {
         inputData = input;
     }
 
     /**
-     * Translates a filterString into a FilterParser.BasicFilter or a list of such filters
-	 * @param filterString the string representation of the filter
-	 * @return a single {@link com.pivotal.pxf.api.FilterParser.BasicFilter} object or
-	 * a {@link java.util.List} of {@link com.pivotal.pxf.api.FilterParser.BasicFilter} objects.
+     * Translates a filterString into a {@link com.pivotal.pxf.api.FilterParser.BasicFilter} or a
+     * list of such filters.
+     *
+     * @param filterString the string representation of the filter
+     * @return a single {@link com.pivotal.pxf.api.FilterParser.BasicFilter}
+     *         object or a {@link java.util.List} of
+     *         {@link com.pivotal.pxf.api.FilterParser.BasicFilter} objects.
+     * @throws Exception if parsing the filter failed, or the filter is not a
+     *             basic filter or a list of basic filters
      */
     public Object getFilterObject(String filterString) throws Exception {
         FilterParser parser = new FilterParser(this);
         Object result = parser.parse(filterString);
 
-        if (!(result instanceof FilterParser.BasicFilter) && !(result instanceof List)) {
-            throw new Exception("String " + filterString + " resolved to no filter");
+        if (!(result instanceof FilterParser.BasicFilter)
+                && !(result instanceof List)) {
+            throw new Exception("String " + filterString
+                    + " resolved to no filter");
         }
 
         return result;
     }
 
-	@SuppressWarnings("unchecked")
-    public Object build(FilterParser.Operation opId,
-                        Object leftOperand,
+    @Override
+    @SuppressWarnings("unchecked")
+    public Object build(FilterParser.Operation opId, Object leftOperand,
                         Object rightOperand) throws Exception {
-        if (leftOperand instanceof FilterParser.BasicFilter || leftOperand instanceof List) {
-            if (opId != FilterParser.Operation.HDOP_AND ||
-                    !(rightOperand instanceof FilterParser.BasicFilter)) {
-                throw new Exception("Only AND is allowed between compound expressions");
+        if (leftOperand instanceof FilterParser.BasicFilter
+                || leftOperand instanceof List) {
+            if (opId != FilterParser.Operation.HDOP_AND
+                    || !(rightOperand instanceof FilterParser.BasicFilter)) {
+                throw new Exception(
+                        "Only AND is allowed between compound expressions");
             }
 
             if (leftOperand instanceof List) {
-                return handleCompoundOperations((List<FilterParser.BasicFilter>) leftOperand, (FilterParser.BasicFilter) rightOperand);
+                return handleCompoundOperations(
+                        (List<FilterParser.BasicFilter>) leftOperand,
+                        (FilterParser.BasicFilter) rightOperand);
             } else {
-                return handleCompoundOperations((FilterParser.BasicFilter) leftOperand, (FilterParser.BasicFilter) rightOperand);
+                return handleCompoundOperations(
+                        (FilterParser.BasicFilter) leftOperand,
+                        (FilterParser.BasicFilter) rightOperand);
             }
         }
 
         if (!(rightOperand instanceof FilterParser.Constant)) {
-            throw new Exception("expressions of column-op-column are not supported");
+            throw new Exception(
+                    "expressions of column-op-column are not supported");
         }
 
         // Assume column is on the left
@@ -70,8 +85,8 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     }
 
     /*
-     * Handles simple column-operator-constant expressions
-     * Creates a special filter in the case the column is the row key column
+     * Handles simple column-operator-constant expressions. Creates a special
+     * filter in the case the column is the row key column.
      */
     private FilterParser.BasicFilter handleSimpleOperations(FilterParser.Operation opId,
                                                             FilterParser.ColumnIndex column,
@@ -79,24 +94,31 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
         return new FilterParser.BasicFilter(opId, column, constant);
     }
 
-    /*
-     * Handle AND of already calculated expressions
-     * Currently only AND, in the future OR can be added
+    /**
+     * Handles AND of already calculated expressions. Currently only AND is
+     * supported; in the future OR can be added.
      *
      * Four cases here:
-     * 1) both are simple filters
-     * 2) left is a FilterList and right is a filter
-     * 3) left is a filter and right is a FilterList
-     * 4) both are FilterLists
-     *
+     * <ol>
+     * <li>both are simple filters</li>
+     * <li>left is a FilterList and right is a filter</li>
+     * <li>left is a filter and right is a FilterList</li>
+     * <li>both are FilterLists</li>
+     * </ol>
     * Currently, only cases 1 and 2 can occur, since no parentheses are used.
+     *
+     * @param left left hand filter
+     * @param right right hand filter
+     * @return list of filters constructing the filter tree
      */
-    private List<FilterParser.BasicFilter> handleCompoundOperations(List<FilterParser.BasicFilter> left, FilterParser.BasicFilter right) {
+    private List<FilterParser.BasicFilter> handleCompoundOperations(List<FilterParser.BasicFilter> left,
+                                                                    FilterParser.BasicFilter right) {
         left.add(right);
         return left;
     }
 
-    private List<FilterParser.BasicFilter> handleCompoundOperations(FilterParser.BasicFilter left, FilterParser.BasicFilter right) {
+    private List<FilterParser.BasicFilter> handleCompoundOperations(FilterParser.BasicFilter left,
+                                                                    FilterParser.BasicFilter right) {
         List<FilterParser.BasicFilter> result = new LinkedList<FilterParser.BasicFilter>();
 
         result.add(left);
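
For illustration, a minimal usage sketch of the contract documented above: parse a
pushed-down filter string and branch on the two result shapes the javadoc names.
This assumes the sketch lives in the same package as HiveFilterBuilder; the
InputData package and how an instance is obtained are assumptions not shown in
this diff.

    import java.util.List;
    import com.pivotal.pxf.api.FilterParser;
    import com.pivotal.pxf.api.utilities.InputData; // package assumed

    class FilterUsageSketch {
        @SuppressWarnings("unchecked")
        static int countFilters(InputData input, String filterString)
                throws Exception {
            Object result = new HiveFilterBuilder(input).getFilterObject(filterString);
            if (result instanceof FilterParser.BasicFilter) {
                return 1; // simple filter: a single comparison
            }
            // compound filter: a list of BasicFilters, implicitly ANDed
            return ((List<FilterParser.BasicFilter>) result).size();
        }
    }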

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveInputFormatFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveInputFormatFragmenter.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveInputFormatFragmenter.java
index eefcac6..5c51e93 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveInputFormatFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveInputFormatFragmenter.java
@@ -15,17 +15,20 @@ import java.util.List;
 
 /**
 * Specialized Hive fragmenter for RC and Text file tables. Unlike the
- * HiveDataFragmenter, this class does not send the serde properties to the
- * accessor/resolvers. This is done to avoid memory explosion in Hawq. For RC
- * use together with HiveRCFileAccessor/HiveColumnarSerdeResolver. For Text use
- * together with HiveLineBreakAccessor/HiveStringPassResolver.
- * <p/>
+ * {@link HiveDataFragmenter}, this class does not send the serde properties to
+ * the accessor/resolvers. This is done to avoid memory explosion in HAWQ. For
+ * RC use together with {@link HiveRCFileAccessor}/
+ * {@link HiveColumnarSerdeResolver}. For Text use together with
+ * {@link HiveLineBreakAccessor}/{@link HiveStringPassResolver}. <br>
  * Given a Hive table and its partitions, divide the data into fragments (here a
  * data fragment is actually a HDFS file block) and return a list of them. Each
- * data fragment will contain the following information: a. sourceName: full
- * HDFS path to the data file that this data fragment is part of b. hosts: a
- * list of the datanode machines that hold a replica of this block c. userData:
- * inputformat name, serde names and partition keys
+ * data fragment will contain the following information:
+ * <ol>
+ * <li>sourceName: full HDFS path to the data file that this data fragment is
+ * part of</li>
+ * <li>hosts: a list of the datanode machines that hold a replica of this block</li>
+ * <li>userData: inputformat name, serde names and partition keys</li>
+ * </ol>
  */
 public class HiveInputFormatFragmenter extends HiveDataFragmenter {
     private static final Log LOG = LogFactory.getLog(HiveInputFormatFragmenter.class);
@@ -40,13 +43,13 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
     public static final int TOK_KEYS = 1;
     public static final int TOK_FILTER_DONE = 2;
 
-    /* defines the Hive input formats currently supported in pxf */
+    /** Defines the Hive input formats currently supported in PXF. */
     public enum PXF_HIVE_INPUT_FORMATS {
         RC_FILE_INPUT_FORMAT,
         TEXT_FILE_INPUT_FORMAT
     }
 
-    /* defines the Hive serializers (serde classes) currently supported in pxf */
+    /** Defines the Hive serializers (serde classes) currently supported in PXF. */
     public enum PXF_HIVE_SERDES {
         COLUMNAR_SERDE,
         LAZY_BINARY_COLUMNAR_SERDE,
@@ -54,7 +57,7 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
     }
 
     /**
-     * Constructs a HiveInputFormatFragmenter
+     * Constructs a HiveInputFormatFragmenter.
      *
      * @param inputData all input parameters coming from the client
      */
@@ -63,10 +66,17 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
     }
 
     /**
-     * Extracts the user data
+     * Extracts the user data:
+     * serde name, partition keys, and whether the filter was included in the
+     * fragmenter.
+     *
+     * @param input input data from client
+     * @param supportedSerdes supported serde names
+     * @return parsed tokens
+     * @throws UserDataException if the user data contains an unsupported serde
+     *                           or the wrong number of tokens
      */
     static public String[] parseToks(InputData input, String... supportedSerdes)
-            throws Exception {
+            throws UserDataException {
         String userData = new String(input.getFragmentUserData());
         String[] toks = userData.split(HIVE_UD_DELIM);
         if (supportedSerdes.length > 0
@@ -143,23 +153,24 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
     }
 
     /**
-     * Converts HAWQ type to hive type. The supported mappings are:<br/>
-     * BOOLEAN -> boolean<br/>
-     * SMALLINT -> smallint (tinyint is converted to smallint)<br/>
-     * BIGINT -> bigint<br/>
-     * TIMESTAMP, TIME -> timestamp<br/>
-     * NUMERIC -> decimal<br/>
-     * BYTEA -> binary<br/>
-     * INTERGER -> int<br/>
-     * TEXT -> string<br/>
-     * REAL -> float<br/>
-     * FLOAT8 -> double
-     * <p/>
+     * Converts HAWQ type to hive type. The supported mappings are:<ul>
+     * <li>{@code BOOLEAN -> boolean}</li>
+     * <li>{@code SMALLINT -> smallint (tinyint is converted to smallint)}</li>
+     * <li>{@code BIGINT -> bigint}</li>
+     * <li>{@code TIMESTAMP, TIME -> timestamp}</li>
+     * <li>{@code NUMERIC -> decimal}</li>
+     * <li>{@code BYTEA -> binary}</li>
+     * <li>{@code INTEGER -> int}</li>
+     * <li>{@code TEXT -> string}</li>
+     * <li>{@code REAL -> float}</li>
+     * <li>{@code FLOAT8 -> double}</li>
+     * </ul>
      * All other types (both in HAWQ and in HIVE) are not supported.
      *
      * @param type HAWQ data type
      * @param name field name
      * @return Hive type
+     * @throws UnsupportedTypeException if type is not supported
      */
     public static String toHiveType(DataType type, String name) {
         switch (type) {
@@ -245,7 +256,6 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
         assertFileType(inputFormatName, partData);
         String userData = assertSerde(serdeName, partData) + HIVE_UD_DELIM
                 + partitionKeys + HIVE_UD_DELIM + filterInFragmenter;
-        ;
 
         return userData.getBytes();
     }
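
The HAWQ-to-Hive mapping documented above can be mirrored in a standalone sketch.
This is not the method's actual body: the TIME constant is not shown in this
diff, so its branch is noted only in a comment, and the real code throws
UnsupportedTypeException rather than IllegalArgumentException.

    import com.pivotal.pxf.api.io.DataType;

    class HiveTypeSketch {
        static String toHiveTypeSketch(DataType type, String name) {
            switch (type) {
                case BOOLEAN:   return "boolean";
                case SMALLINT:  return "smallint";
                case BIGINT:    return "bigint";
                case TIMESTAMP: return "timestamp"; // TIME maps here as well
                case NUMERIC:   return "decimal";
                case BYTEA:     return "binary";
                case INTEGER:   return "int";
                case TEXT:      return "string";
                case REAL:      return "float";
                case FLOAT8:    return "double";
                default:
                    throw new IllegalArgumentException(
                            type + " is not supported for field " + name);
            }
        }
    }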

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveLineBreakAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveLineBreakAccessor.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveLineBreakAccessor.java
index ad6e70e..b293123 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveLineBreakAccessor.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveLineBreakAccessor.java
@@ -10,12 +10,15 @@ import static com.pivotal.pxf.plugins.hive.HiveInputFormatFragmenter.PXF_HIVE_SE
 
 /**
  * Specialization of HiveAccessor for a Hive table stored as Text files.
- * Use together with HiveInputFormatFragmenter/HiveStringPassResolver
+ * Use together with {@link HiveInputFormatFragmenter}/{@link HiveStringPassResolver}.
  */
 public class HiveLineBreakAccessor extends HiveAccessor {
 
     /**
      * Constructs a HiveLineBreakAccessor.
+     *
+     * @param input input containing user data
+     * @throws Exception if the user data is invalid
      */
     public HiveLineBreakAccessor(InputData input) throws Exception {
         super(input, new TextInputFormat());

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveRCFileAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveRCFileAccessor.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveRCFileAccessor.java
index 66acd64..6e64296 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveRCFileAccessor.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveRCFileAccessor.java
@@ -15,12 +15,15 @@ import static com.pivotal.pxf.plugins.hive.HiveInputFormatFragmenter.PXF_HIVE_SE
 /**
  * Specialization of HiveAccessor for a Hive table that stores only RC files.
  * This class replaces the generic HiveAccessor for a case where a table is stored entirely as RC files.
- * Use together with HiveInputFormatFragmenter/HiveColumnarSerdeResolver
+ * Use together with {@link HiveInputFormatFragmenter}/{@link HiveColumnarSerdeResolver}.
  */
 public class HiveRCFileAccessor extends HiveAccessor {
 
     /**
      * Constructs a HiveRCFileAccessor.
+     *
+     * @param input input containing user data
+     * @throws Exception if the user data is invalid
      */
     public HiveRCFileAccessor(InputData input) throws Exception {
         super(input, new RCFileInputFormat());

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveResolver.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveResolver.java
index 3096e68..85a04d3 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveResolver.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/HiveResolver.java
@@ -71,6 +71,8 @@ public class HiveResolver extends Plugin implements ReadResolver {
      *
      * @param input contains the Serde class name, the serde properties string
      *            and the partition keys
+     * @throws Exception if the user data is invalid or the serde fails to be
+     *             instantiated
      */
     public HiveResolver(InputData input) throws Exception {
         super(input);
@@ -100,7 +102,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
         return record;
     }
 
-    /* parse user data string (arrived from fragmenter) */
+    /* Parses the user data string (arriving from the fragmenter). */
     void parseUserData(InputData input) throws Exception {
         final int EXPECTED_NUM_OF_TOKS = 5;
 
@@ -122,7 +124,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
                 : input.getUserProperty("MAPKEY_DELIM");
     }
 
-    /* Get and init the deserializer for the records of this Hive data fragment */
+    /* Gets and initializes the deserializer for the records of this Hive data fragment. */
     void initSerde(InputData inputData) throws Exception {
         Properties serdeProperties;
 
@@ -138,7 +140,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
 
     /*
     * The partition fields are initialized one time based on userData provided
-     * by the fragmenter
+     * by the fragmenter.
      */
     void initPartitionFields() {
         partitionFields = new LinkedList<>();
@@ -234,7 +236,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
 
     /*
      * The partition fields are initialized one time based on userData provided
-     * by the fragmenter
+     * by the fragmenter.
      */
     int initPartitionFields(StringBuilder parts) {
         if (partitionKeys.equals(HiveDataFragmenter.HIVE_NO_PART_TBL)) {
@@ -287,8 +289,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
                         parts.append(HiveDecimal.create(val).bigDecimalValue());
                         break;
                     case serdeConstants.BINARY_TYPE_NAME:
-                        Utilities.byteArrayToOctalString(val.getBytes(),
-                                parts);
+                        Utilities.byteArrayToOctalString(val.getBytes(), parts);
                         break;
                     default:
                         throw new UnsupportedTypeException(
@@ -301,7 +302,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
 
     /**
      * Returns true if the partition value is Hive's default partition name
-     * (defined in hive.exec.default.partition.name)
+     * (defined in hive.exec.default.partition.name).
      *
      * @param partitionType partition field type
      * @param partitionValue partition value
@@ -559,7 +560,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
     }
 
     /*
-     * Get the delimiter character from the URL, verify and store it. Must be a
+     * Gets the delimiter character from the URL, verifies and stores it. Must be a
      * single ASCII character (same restriction as HAWQ's). If a hex
      * representation was passed, it is converted to its char.
      */
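
The delimiter rule in that last comment can be shown with a small sketch; the
exact wire format of the hex form is an assumption (taken here as a string such
as "\x2C"):

    class DelimiterSketch {
        static char parseDelimiterSketch(String raw) {
            // accept a literal single character or a hex escape,
            // then enforce the single-ASCII-character restriction
            String s = raw.startsWith("\\x")
                    ? String.valueOf((char) Integer.parseInt(raw.substring(2), 16))
                    : raw;
            if (s.length() != 1 || s.charAt(0) > 127) {
                throw new IllegalArgumentException("invalid delimiter: " + raw);
            }
            return s.charAt(0); // e.g. "\x2C" -> ',' and "|" -> '|'
        }
    }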

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/utilities/HiveUtilities.java
index 13de547..700a88f 100644
--- a/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/com/pivotal/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -64,25 +64,27 @@ public class HiveUtilities {
     * Checks if the Hive type is supported, and if so
     * returns its matching HAWQ type.
      * Unsupported types will result in an exception.
-     *
-     * The supported mappings are:
-     * tinyint -> int2
-     * smallint -> int2
-     * int -> int4
-     * bigint -> int8
-     * boolean -> bool
-     * float -> float4
-     * double -> float8
-     * string -> text
-     * binary -> bytea
-     * timestamp -> timestamp
-     * date -> date
-     * decimal(precision, scale) -> numeric(precision, scale)
-     * varchar(size) -> varchar(size)
-     * char(size) -> bpchar(size)
+     * <br>
+     * The supported mappings are:<ul>
+     * <li>{@code tinyint -> int2}</li>
+     * <li>{@code smallint -> int2}</li>
+     * <li>{@code int -> int4}</li>
+     * <li>{@code bigint -> int8}</li>
+     * <li>{@code boolean -> bool}</li>
+     * <li>{@code float -> float4}</li>
+     * <li>{@code double -> float8}</li>
+     * <li>{@code string -> text}</li>
+     * <li>{@code binary -> bytea}</li>
+     * <li>{@code timestamp -> timestamp}</li>
+     * <li>{@code date -> date}</li>
+     * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
+     * <li>{@code varchar(size) -> varchar(size)}</li>
+     * <li>{@code char(size) -> bpchar(size)}</li>
+     * </ul>
      *
      * @param hiveColumn hive column schema
      * @return field with mapped HAWQ type and modifiers
+     * @throws UnsupportedTypeException if the column type is not supported
      */
     public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
         String fieldName = hiveColumn.getName();
@@ -156,8 +158,11 @@ public class HiveUtilities {
 
     /**
      * Verifies modifiers are null or integers.
+     * A modifier is a value assigned to a type,
+     * e.g. the size of a varchar: varchar(size).
      *
      * @param modifiers type modifiers to be verified
+     * @return whether modifiers are null or integers
      */
     private static boolean verifyModifers(String[] modifiers) {
         if (modifiers == null) {
@@ -175,7 +180,10 @@ public class HiveUtilities {
      * Extracts the db_name and table_name from the qualifiedName.
      * qualifiedName is the Hive table name that the user enters in the CREATE EXTERNAL TABLE statement
      * or when querying HCatalog table.
-     * It can be either <table_name> or <db_name.table_name>.
+     * It can be either <code>table_name</code> or <code>db_name.table_name</code>.
+     *
+     * @param qualifiedName Hive table name
+     * @return {@link com.pivotal.pxf.api.Metadata.Table} object holding the full table name
      */
     public static Metadata.Table parseTableQualifiedName(String qualifiedName) {
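
The qualified-name handling can be sketched as follows. The real method returns
a Metadata.Table; the default database name and the rejection of names with more
than one dot are assumptions, not taken from this diff:

    class QualifiedNameSketch {
        static String[] splitQualifiedNameSketch(String qualifiedName) {
            String[] parts = qualifiedName.split("\\.");
            if (parts.length == 1) {
                return new String[] { "default", parts[0] }; // assumed default db
            }
            if (parts.length == 2) {
                return parts; // { db_name, table_name }
            }
            throw new IllegalArgumentException(qualifiedName
                    + " is not of the form <table_name> or <db_name.table_name>");
        }
    }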
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/BridgeOutputBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/BridgeOutputBuilder.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/BridgeOutputBuilder.java
index 634890c..dd8111b 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/BridgeOutputBuilder.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/BridgeOutputBuilder.java
@@ -18,12 +18,13 @@ import java.util.List;
 
 import static com.pivotal.pxf.api.io.DataType.TEXT;
 
-/*
- * Class creates the output record that is piped by the java process to the GPDB backend
- * Actually, the output record is serialized and the obtained byte string is piped to the GPDB
- * segment. The output record will implement Writable, and the mission of BridgeOutputBuilder
- * will be to translate a list of OneField objects (obtained from the Resolver) into
- * an output record.
+/**
+ * Class creates the output record that is piped by the java process to the HAWQ
+ * backend. Actually, the output record is serialized and the obtained byte
+ * string is piped to the HAWQ segment. The output record will implement
+ * Writable, and the mission of BridgeOutputBuilder will be to translate a list
+ * of {@link OneField} objects (obtained from the Resolver) into an output
+ * record.
  */
 public class BridgeOutputBuilder {
     private ProtocolData inputData;
@@ -33,24 +34,29 @@ public class BridgeOutputBuilder {
     private String[] colNames;
 
     /**
-     * Constructs a BridgeOutputBuilder
+     * Constructs a BridgeOutputBuilder.
+     *
+     * @param input input data, like requested output format and schema
+     *            information
      */
     public BridgeOutputBuilder(ProtocolData input) {
         inputData = input;
         makeErrorRecord();
     }
 
-    /*
-     * We need a separate GPDBWritable record to represent the error record. Just setting
-     * the errorFlag on the "output" GPDBWritable variable is not good enough, since the GPDBWritable is built
-     * only after the first record is read from the file. And if we encounter an error while fetching
-     * the first record from the file, then the output member will be null. The reason we cannot count on
-     * the schema to build the GPDBWritable output variable before reading the first record, is
-     * because the schema does not account for arrays - we cannot know from the schema the length of
-     * an array. We find out only after fetching the first record.
+    /**
+     * We need a separate GPDBWritable record to represent the error record.
+     * Just setting the errorFlag on the "output" GPDBWritable variable is not
+     * good enough, since the GPDBWritable is built only after the first record
+     * is read from the file. And if we encounter an error while fetching the
+     * first record from the file, then the output member will be null. The
+     * reason we cannot count on the schema to build the GPDBWritable output
+     * variable before reading the first record, is because the schema does not
+     * account for arrays - we cannot know from the schema the length of an
+     * array. We find out only after fetching the first record.
      */
     void makeErrorRecord() {
-        int[] errSchema = {TEXT.getOID()};
+        int[] errSchema = { TEXT.getOID() };
 
         if (inputData.outputFormat() != OutputFormat.BINARY) {
             return;
@@ -60,8 +66,13 @@ public class BridgeOutputBuilder {
         errorRecord.setError(true);
     }
 
-    /*
-     * Returns the error record
+    /**
+     * Returns the error record. If the output format is not binary, error
+     * records are not supported, and the given exception will be thrown.
+     *
+     * @param ex exception to be stored in record
+     * @return error record
+     * @throws Exception if the output format is not binary
      */
     public Writable getErrorOutput(Exception ex) throws Exception {
         if (inputData.outputFormat() == OutputFormat.BINARY) {
@@ -72,10 +83,15 @@ public class BridgeOutputBuilder {
         }
     }
 
-    /*
+    /**
      * Translates recFields (obtained from the Resolver) into an output record.
-	 */
-    public Writable makeOutput(List<OneField> recFields) throws BadRecordException {
+     *
+     * @param recFields record fields to be serialized
+     * @return Writable object with serialized row
+     * @throws BadRecordException if building the output record failed
+     */
+    public Writable makeOutput(List<OneField> recFields)
+            throws BadRecordException {
         if (output == null && inputData.outputFormat() == OutputFormat.BINARY) {
             makeGPDBWritableOutput();
         }
@@ -85,9 +101,11 @@ public class BridgeOutputBuilder {
         return output;
     }
 
-    /*
-     * Creates the GPDBWritable object. The object is created one time
-     * and is refilled from recFields for each record sent
+    /**
+     * Creates the GPDBWritable object. The object is created once and is
+     * refilled from recFields for each record sent.
+     *
+     * @return empty GPDBWritable object with set columns
      */
     GPDBWritable makeGPDBWritableOutput() {
         int num_actual_fields = inputData.getColumns();
@@ -104,8 +122,11 @@ public class BridgeOutputBuilder {
         return (GPDBWritable) output;
     }
 
-    /*
-     * Fills the output record based on the fields in recFields
+    /**
+     * Fills the output record based on the fields in recFields.
+     *
+     * @param recFields record fields
+     * @throws BadRecordException if building the output record failed
      */
     void fillOutputRecord(List<OneField> recFields) throws BadRecordException {
         if (inputData.outputFormat() == OutputFormat.BINARY) {
@@ -115,56 +136,80 @@ public class BridgeOutputBuilder {
         }
     }
 
-    /*
-     * Fills a GPDBWritable object based on recFields
-     * The input record recFields must correspond to schema.
-     * If the record has more or less fields than the schema we throw an exception.
-     * We require that the type of field[i] in recFields corresponds to the type
-     * of field[i] in the schema.
+    /**
+     * Fills a GPDBWritable object based on recFields. The input record
+     * recFields must correspond to the schema. If the record has more or fewer
+     * fields than the schema, we throw an exception. We require that the type of
+     * field[i] in recFields corresponds to the type of field[i] in the schema.
+     *
+     * @param recFields record fields
+     * @throws BadRecordException if building the output record failed
      */
     void fillGPDBWritable(List<OneField> recFields) throws BadRecordException {
         int size = recFields.size();
-        if (size == 0) { // size 0 means the resolver couldn't deserialize any of the record fields
+        if (size == 0) { // size 0 means the resolver couldn't deserialize any
+                         // of the record fields
             throw new BadRecordException("No fields in record");
         } else if (size != schema.length) {
-            throw new BadRecordException("Record has " + size + " fields but the schema size is " + schema.length);
+            throw new BadRecordException("Record has " + size
+                    + " fields but the schema size is " + schema.length);
         }
 
         for (int i = 0; i < size; i++) {
             OneField current = recFields.get(i);
             if (!isTypeInSchema(current.type, schema[i])) {
-                throw new BadRecordException("For field " + colNames[i] + " schema requires type " + DataType.get(schema[i]).toString() +
-                        " but input record has type " + DataType.get(current.type).toString());
+                throw new BadRecordException("For field " + colNames[i]
+                        + " schema requires type "
+                        + DataType.get(schema[i]).toString()
+                        + " but input record has type "
+                        + DataType.get(current.type).toString());
             }
 
             fillOneGPDBWritableField(current, i);
         }
     }
 
-    /* Tests if data type is a string type */
+    /**
+     * Tests if a data type is a string type. A string type is a type that can
+     * be serialized as a string, such as varchar, bpchar, text, numeric,
+     * timestamp, date.
+     *
+     * @param type data type
+     * @return whether data type is string type
+     */
     boolean isStringType(DataType type) {
-        return Arrays.asList(DataType.VARCHAR, DataType.BPCHAR, DataType.TEXT, DataType.NUMERIC, DataType.TIMESTAMP, DataType.DATE)
-                .contains(type);
+        return Arrays.asList(DataType.VARCHAR, DataType.BPCHAR, DataType.TEXT,
+                DataType.NUMERIC, DataType.TIMESTAMP,
+                DataType.DATE).contains(type);
     }
 
-    /* Tests if record field type and schema type correspond */
+    /**
+     * Tests if record field type and schema type correspond.
+     *
+     * @param recType record type code
+     * @param schemaType schema type code
+     * @return whether record type and schema type match
+     */
     boolean isTypeInSchema(int recType, int schemaType) {
         DataType dtRec = DataType.get(recType);
         DataType dtSchema = DataType.get(schemaType);
 
-        return (dtSchema == DataType.UNSUPPORTED_TYPE || dtRec == dtSchema ||
-                (isStringType(dtRec) && isStringType(dtSchema)));
+        return (dtSchema == DataType.UNSUPPORTED_TYPE || dtRec == dtSchema
+                || (isStringType(dtRec) && isStringType(dtSchema)));
     }
 
-    /*
-     * Fills a Text object based on recFields
+    /**
+     * Fills a Text object based on recFields.
+     *
+     * @param recFields record fields
+     * @throws BadRecordException if a text-formatted record has more than one field
      */
     void fillText(List<OneField> recFields) throws BadRecordException {
         /*
          * For the TEXT case there must be only one record in the list
-		 */
+         */
         if (recFields.size() != 1) {
-            throw new BadRecordException("BridgeOutputBuilder must receive one field when handling the TEXT format");
+            throw new BadRecordException(
+                    "BridgeOutputBuilder must receive one field when handling the TEXT format");
         }
 
         OneField fld = recFields.get(0);
@@ -178,32 +223,37 @@ public class BridgeOutputBuilder {
         }
     }
 
-    /*
-     * Fills one GPDBWritable field
+    /**
+     * Fills one GPDBWritable field.
+     *
+     * @param oneField field
+     * @param colIdx column index
+     * @throws BadRecordException if the field type is not supported or doesn't match the schema
      */
-    void fillOneGPDBWritableField(OneField oneField, int i) throws BadRecordException {
+    void fillOneGPDBWritableField(OneField oneField, int colIdx)
+            throws BadRecordException {
         int type = oneField.type;
         Object val = oneField.val;
         GPDBWritable GPDBoutput = (GPDBWritable) output;
         try {
             switch (DataType.get(type)) {
                 case INTEGER:
-                    GPDBoutput.setInt(i, (Integer) val);
+                    GPDBoutput.setInt(colIdx, (Integer) val);
                     break;
                 case FLOAT8:
-                    GPDBoutput.setDouble(i, (Double) val);
+                    GPDBoutput.setDouble(colIdx, (Double) val);
                     break;
                 case REAL:
-                    GPDBoutput.setFloat(i, (Float) val);
+                    GPDBoutput.setFloat(colIdx, (Float) val);
                     break;
                 case BIGINT:
-                    GPDBoutput.setLong(i, (Long) val);
+                    GPDBoutput.setLong(colIdx, (Long) val);
                     break;
                 case SMALLINT:
-                    GPDBoutput.setShort(i, (Short) val);
+                    GPDBoutput.setShort(colIdx, (Short) val);
                     break;
                 case BOOLEAN:
-                    GPDBoutput.setBoolean(i, (Boolean) val);
+                    GPDBoutput.setBoolean(colIdx, (Boolean) val);
                     break;
                 case BYTEA:
                     byte[] bts = null;
@@ -214,7 +264,7 @@ public class BridgeOutputBuilder {
                             bts[j] = Array.getByte(val, j);
                         }
                     }
-                    GPDBoutput.setBytes(i, bts);
+                    GPDBoutput.setBytes(colIdx, bts);
                     break;
                 case VARCHAR:
                 case BPCHAR:
@@ -223,11 +273,13 @@ public class BridgeOutputBuilder {
                 case NUMERIC:
                 case TIMESTAMP:
                 case DATE:
-                    GPDBoutput.setString(i, ObjectUtils.toString(val, null));
+                    GPDBoutput.setString(colIdx, ObjectUtils.toString(val, null));
                     break;
                 default:
-                    String valClassName = (val != null) ? val.getClass().getSimpleName() : null;
-                    throw new UnsupportedOperationException(valClassName + " is not supported for HAWQ conversion");
+                    String valClassName = (val != null) ? val.getClass().getSimpleName()
+                            : null;
+                    throw new UnsupportedOperationException(valClassName
+                            + " is not supported for HAWQ conversion");
             }
         } catch (TypeMismatchException e) {
             throw new BadRecordException(e);
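
The intended control flow of the builder, per the javadoc above, is roughly the
following sketch. Packages of OneField, BadRecordException and Writable are
omitted here; note that on TEXT output getErrorOutput() rethrows instead of
returning an error record.

    class OutputFlowSketch {
        static Writable toRecord(BridgeOutputBuilder builder,
                                 java.util.List<OneField> recFields)
                throws Exception {
            try {
                return builder.makeOutput(recFields); // one Writable per resolved row
            } catch (BadRecordException e) {
                return builder.getErrorOutput(e); // binary-only error record
            }
        }
    }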

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponse.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponse.java
index dd8d2cf..87e918b 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponse.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponse.java
@@ -26,6 +26,11 @@ public class FragmentsResponse implements StreamingOutput {
 
     private List<Fragment> fragments;
 
+    /**
+     * Constructs a fragments response from a list of fragments.
+     *
+     * @param fragments fragment list
+     */
     public FragmentsResponse(List<Fragment> fragments) {
         this.fragments = fragments;
     }
@@ -34,7 +39,7 @@ public class FragmentsResponse implements StreamingOutput {
      * Serializes a fragments list in JSON,
     * to be used as the result string for HAWQ.
      * An example result is as follows:
-     * {"PXFFragments":[{"replicas":["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],"sourceName":"text2.csv", "index":"0", "metadata":<base64 metadata for fragment>, "userData":"<data_specific_to_third_party_fragmenter>"},{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"],"sourceName":"text_data.csv","index":"0","metadata":<base64 metadata for fragment>,"userData":"<data_specific_to_third_party_fragmenter>"}]}
+     * {@code {"PXFFragments":[{"replicas":["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],"sourceName":"text2.csv", "index":"0", "metadata":<base64 metadata for fragment>, "userData":"<data_specific_to_third_party_fragmenter>"},{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"],"sourceName":"text_data.csv","index":"0","metadata":<base64 metadata for fragment>,"userData":"<data_specific_to_third_party_fragmenter>"}]}}
      */
     @Override
     public void write(OutputStream output) throws IOException,

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponseFormatter.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponseFormatter.java
index a09e105..5145929 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/FragmentsResponseFormatter.java
@@ -25,9 +25,9 @@ public class FragmentsResponseFormatter {
      * @param fragments list of fragments
      * @param data data (e.g. path) related to the fragments
      * @return FragmentsResponse with given fragments
-     * @throws IOException
+     * @throws UnknownHostException if converting host names to IPs fails
      */
-    public static FragmentsResponse formatResponse(List<Fragment> fragments, String data) throws IOException {
+    public static FragmentsResponse formatResponse(List<Fragment> fragments, String data) throws UnknownHostException {
         /* print the raw fragment list to log when in debug level */
         if (LOG.isDebugEnabled()) {
             LOG.debug("Fragments before conversion to IP list:");
@@ -69,7 +69,9 @@ public class FragmentsResponseFormatter {
     }
 
     /**
-     * Converts hosts to their matching IP addresses
+     * Converts hosts to their matching IP addresses.
+     *
+     * @throws UnknownHostException if converting a host name to an IP fails
      */
     private static void convertHostsToIPs(List<Fragment> fragments) throws UnknownHostException {
         /* host converted to IP map. Used to limit network calls. */
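
The "limit network calls" comment amounts to memoizing InetAddress lookups. A
minimal sketch of that idea (the real method rewrites each fragment's replica
list in place):

    import java.net.InetAddress;
    import java.net.UnknownHostException;
    import java.util.Map;

    class HostResolveSketch {
        static String toIP(Map<String, String> cache, String host)
                throws UnknownHostException {
            String ip = cache.get(host);
            if (ip == null) {
                ip = InetAddress.getByName(host).getHostAddress(); // one lookup per host
                cache.put(host, ip);
            }
            return ip;
        }
    }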

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/MetadataResponseFormatter.java
index 4e1323f..1bb0cfd 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/MetadataResponseFormatter.java
@@ -18,6 +18,10 @@ public class MetadataResponseFormatter {
 
     /**
      * Converts {@link Metadata} to JSON String format.
+     *
+     * @param metadata metadata to convert
+     * @return JSON formatted response
+     * @throws IOException if converting the data to JSON fails
      */
     public static String formatResponseString(Metadata metadata) throws IOException {
         /* print the metadata before serialization */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/BufferWritable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/BufferWritable.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/BufferWritable.java
index 2be23d3..3115ca7 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/BufferWritable.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/BufferWritable.java
@@ -5,49 +5,53 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.UnsupportedOperationException;
 
-
 /**
- * A serializable object for transporting a byte array through the Bridge framework
+ * A serializable object for transporting a byte array through the Bridge
+ * framework.
  */
 public class BufferWritable implements Writable {
-	
-	byte [] buf = null;
-	
-	/**
-	 * Constructs a BufferWritable.
-	 * Copies the buffer reference and not the actual bytes. This class
-	 * is used when we intend to transport a buffer through the Bridge
-	 * framework without copying the data each time the buffer is passed
-	 * between the Bridge objects.
-	 */
-	public BufferWritable(byte [] inBuf) {
-		buf = inBuf;
-	}
+
+    byte[] buf = null;
+
+    /**
+     * Constructs a BufferWritable. Copies the buffer reference and not the
+     * actual bytes. This class is used when we intend to transport a buffer
+     * through the Bridge framework without copying the data each time the
+     * buffer is passed between the Bridge objects.
+     *
+     * @param inBuf buffer
+     */
+    public BufferWritable(byte[] inBuf) {
+        buf = inBuf;
+    }
 
     /**
      * Serializes the fields of this object to <code>out</code>.
      *
      * @param out <code>DataOutput</code> to serialize this object into.
-     * @throws IOException
+     * @throws IOException if the buffer was not set
      */
-	@Override
+    @Override
     public void write(DataOutput out) throws IOException {
-		if (buf == null)
-			throw new IOException("BufferWritable was not set");
-		out.write(buf);
+        if (buf == null)
+            throw new IOException("BufferWritable was not set");
+        out.write(buf);
     }
 
     /**
      * Deserializes the fields of this object from <code>in</code>.
-     * <p>For efficiency, implementations should attempt to re-use storage in the
-     * existing object where possible.</p>
+     * <p>
+     * For efficiency, implementations should attempt to re-use storage in the
+     * existing object where possible.
+     * </p>
      *
-     * @param in <code>DataInput</code> to deserialize this object from.
-     * @throws IOException
+     * @param in <code>DataInput</code> to deserialize this object from
+     * @throws UnsupportedOperationException always, as this method is not supported
      */
-	@Override
+    @Override
     public void readFields(DataInput in) {
-		throw new UnsupportedOperationException("BufferWritable.readFields() is not implemented");
-	}
+        throw new UnsupportedOperationException(
+                "BufferWritable.readFields() is not implemented");
+    }
 
 }
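
A minimal usage sketch for the class; note the reference-copy semantics
documented in the constructor, so the caller must not mutate the buffer while it
is being written:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class BufferWritableSketch {
        static byte[] writeBuffer(byte[] payload) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            new BufferWritable(payload).write(new DataOutputStream(bos));
            return bos.toByteArray(); // identical to payload
        }
    }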

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/GPDBWritable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/GPDBWritable.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/GPDBWritable.java
index 9e22a85..074f97e 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/GPDBWritable.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/GPDBWritable.java
@@ -19,7 +19,7 @@ public class GPDBWritable implements Writable {
      * GPDBWritable is using the following serialization form:
 	 * Total Length | Version | Error Flag | # of columns | Col type |...| Col type | Null Bit array            |   Col val...
      * 4 byte		| 2 byte  |	1 byte     |   2 byte     |  1 byte  |...|  1 byte  | ceil(# of columns/8) byte |   Fixed or Var length
-     * 
+     *
      * For fixed length type, we know the length.
      * In the col val, we align pad according to the alignment requirement of the type.
      * For var length type, the alignment is always 4 byte.
@@ -150,11 +150,11 @@ public class GPDBWritable implements Writable {
     public void readFields(DataInput in) throws IOException {
         /*
          * extract pkt len.
-		 * 
-		 * GPSQL-1107: 
+		 *
+		 * GPSQL-1107:
 		 * The DataInput might already be empty (EOF), but we can't check it beforehand.
 		 * If that's the case, pktlen is updated to -1, to mark that the object is still empty.
-		 * (can be checked with isEmpty()). 
+		 * (can be checked with isEmpty()).
 		 */
         pktlen = readPktLen(in);
         if (isEmpty()) {
@@ -185,7 +185,7 @@ public class GPDBWritable implements Writable {
         colType = new int[colCnt];
         DBType[] coldbtype = new DBType[colCnt];
         for (int i = 0; i < colCnt; i++) {
-            int enumType = (int) (in.readByte());
+            int enumType = in.readByte();
             curOffset += 1;
             if (enumType == DBType.BIGINT.ordinal()) {
                 colType[i] = BIGINT.getOID();
@@ -362,7 +362,7 @@ public class GPDBWritable implements Writable {
             } else {
                 nullBits[i] = false;
 
-				/* 
+				/*
                  * For fixed length type, we get the fixed length.
 				 * For var len binary format, the length is in the col value.
 				 * For text format, we must convert encoding first.
@@ -378,7 +378,7 @@ public class GPDBWritable implements Writable {
 				/* calculate and add the type alignment padding */
                 padLength[i] = roundUpAlignment(datlen, coldbtype.getAlignment()) - datlen;
                 datlen += padLength[i];
-				
+
 				/* for variable length type, we add a 4 byte length header */
                 if (coldbtype.isVarLength()) {
                     datlen += 4;
@@ -392,22 +392,22 @@ public class GPDBWritable implements Writable {
 		 */
         int endpadding = roundUpAlignment(datlen, 8) - datlen;
         datlen += endpadding;
-		
+
 		/* Construct the packet header */
         out.writeInt(datlen);
         out.writeShort(VERSION);
         out.writeByte(errorFlag);
         out.writeShort(numCol);
-		
+
 		/* Write col type */
         for (int i = 0; i < numCol; i++) {
             out.writeByte(enumType[i]);
         }
-		
+
 		/* Nullness */
         byte[] nullBytes = boolArrayToByteArray(nullBits);
         out.write(nullBytes);
-		
+
 		/* Column Value */
         for (int i = 0; i < numCol; i++) {
             if (!nullBits[i]) {
@@ -442,7 +442,7 @@ public class GPDBWritable implements Writable {
                         out.writeInt(colLength[i]);
                         out.write((byte[]) colValue[i]);
                         break;
-					
+
 					/* For text format, add 4byte length header. string is already '\0' terminated */
                     default: {
                         out.writeInt(colLength[i]);
@@ -513,7 +513,7 @@ public class GPDBWritable implements Writable {
      */
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -526,7 +526,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -539,7 +539,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -552,7 +552,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -569,7 +569,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -582,7 +582,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -595,7 +595,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -608,7 +608,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the column value of the record
+     * Sets the column value of the record.
      *
      * @param colIdx the column index
      * @param val    the value
@@ -621,9 +621,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Long getLong(int colIdx)
@@ -633,9 +634,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Boolean getBoolean(int colIdx)
@@ -645,9 +647,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public byte[] getBytes(int colIdx)
@@ -657,9 +660,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public String getString(int colIdx)
@@ -669,9 +673,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Float getFloat(int colIdx)
@@ -681,9 +686,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Double getDouble(int colIdx)
@@ -693,9 +699,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Integer getInt(int colIdx)
@@ -705,9 +712,10 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Get the column value of the record
+     * Gets the column value of the record.
      *
      * @param colIdx the column index
+     * @return column value
      * @throws TypeMismatchException the column type does not match
      */
     public Short getShort(int colIdx)
@@ -717,7 +725,7 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Set the error field
+     * Sets the error field.
      *
      * @param errorVal the error value
      */
@@ -726,8 +734,9 @@ public class GPDBWritable implements Writable {
     }
 
     /**
-     * Return a string representation of the object
+     * Returns a string representation of the object.
      */
+    @Override
     public String toString() {
         if (colType == null) {
             return null;
@@ -798,6 +807,9 @@ public class GPDBWritable implements Writable {
      * Helper to get the type name.
      * If a given oid is not in the commonly used list, we
      * would expect a TEXT for it (for the error message).
+     *
+     * @param oid type OID
+     * @return type name
      */
     public static String getTypeName(int oid) {
         switch (DataType.get(oid)) {
@@ -852,6 +864,8 @@ public class GPDBWritable implements Writable {
     * Returns whether the writable object is empty,
      * based on the pkt len as read from stream.
      * -1 means nothing was read (eof).
+     *
+     * @return whether the writable object is empty
      */
     public boolean isEmpty() {
         return pktlen == EOF;
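
The alignment rules in the serialization comment rely on roundUpAlignment(),
whose body is not shown in this diff; the arithmetic the comments imply is the
usual round-up-to-a-multiple (an assumption):

    class AlignmentSketch {
        static int roundUpAlignmentSketch(int len, int align) {
            return ((len + align - 1) / align) * align;
        }
        // e.g. a 5-byte value with 8-byte alignment: roundUpAlignmentSketch(5, 8)
        // == 8, so padLength == 3; the packet is then end-padded to a multiple of 8.
    }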

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Text.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Text.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Text.java
index 4a5af6c..2ce28ba 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Text.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Text.java
@@ -13,9 +13,9 @@ import java.nio.charset.*;
 import java.util.Arrays;
 
 /**
- * This class stores text using standard UTF8 encoding.
- * It provides methods to serialize, deserialize.
- * The type of length is integer and is serialized using zero-compressed format.
+ * This class stores text using standard UTF-8 encoding. It provides methods
+ * to serialize and deserialize text. The length is an integer and is
+ * serialized using zero-compressed format.
  */
 public class Text implements Writable {
 
@@ -28,22 +28,22 @@ public class Text implements Writable {
     private static final int EOF = -1;
 
     private static final byte[] EMPTY_BYTES = new byte[0];
-    private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
-            new ThreadLocal<CharsetEncoder>() {
-                protected CharsetEncoder initialValue() {
-                    return Charset.forName("UTF-8").newEncoder().
-                            onMalformedInput(CodingErrorAction.REPORT).
-                            onUnmappableCharacter(CodingErrorAction.REPORT);
-                }
-            };
-    private static ThreadLocal<CharsetDecoder> DECODER_FACTORY =
-            new ThreadLocal<CharsetDecoder>() {
-                protected CharsetDecoder initialValue() {
-                    return Charset.forName("UTF-8").newDecoder().
-                            onMalformedInput(CodingErrorAction.REPORT).
-                            onUnmappableCharacter(CodingErrorAction.REPORT);
-                }
-            };
+    private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY = new ThreadLocal<CharsetEncoder>() {
+        @Override
+        protected CharsetEncoder initialValue() {
+            return Charset.forName("UTF-8").newEncoder().onMalformedInput(
+                    CodingErrorAction.REPORT).onUnmappableCharacter(
+                    CodingErrorAction.REPORT);
+        }
+    };
+    private static ThreadLocal<CharsetDecoder> DECODER_FACTORY = new ThreadLocal<CharsetDecoder>() {
+        @Override
+        protected CharsetDecoder initialValue() {
+            return Charset.forName("UTF-8").newDecoder().onMalformedInput(
+                    CodingErrorAction.REPORT).onUnmappableCharacter(
+                    CodingErrorAction.REPORT);
+        }
+    };
     private byte[] bytes;
     private int length;
 
@@ -54,6 +54,8 @@ public class Text implements Writable {
 
     /**
      * Construct from a string.
+     *
+     * @param string input string
      */
     public Text(String string) {
         set(string);
@@ -61,6 +63,8 @@ public class Text implements Writable {
 
     /**
      * Construct from another text.
+     *
+     * @param utf8 text to copy
      */
     public Text(Text utf8) {
         set(utf8);
@@ -68,6 +72,8 @@ public class Text implements Writable {
 
     /**
      * Construct from a byte array.
+     *
+     * @param utf8 input byte array
      */
     public Text(byte[] utf8) {
         set(utf8);
@@ -107,13 +113,22 @@ public class Text implements Writable {
     }
 
     /**
-     * Converts the provided byte array to a String using the
-     * UTF-8 encoding. If <code>replace</code> is true, then
-     * malformed input is replaced with the
-     * substitution character, which is U+FFFD. Otherwise the
-     * method throws a MalformedInputException.
+     * Converts the provided byte array to a String using the UTF-8 encoding. If
+     * <code>replace</code> is true, then malformed input is replaced with the
+     * substitution character, which is U+FFFD. Otherwise the method throws a
+     * MalformedInputException.
+     *
+     * @param utf8 UTF-8 encoded byte array
+     * @param start start offset in the array
+     * @param length number of bytes to decode
+     * @param replace whether to replace malformed input with the substitution
+     *            character
+     * @return decoded string
+     * @throws MalformedInputException if the input is malformed and
+     *             <code>replace</code> is false
+     * @throws CharacterCodingException if the conversion failed
      */
-    public static String decode(byte[] utf8, int start, int length, boolean replace)
+    public static String decode(byte[] utf8, int start, int length,
+                                boolean replace)
             throws CharacterCodingException {
         return decode(ByteBuffer.wrap(utf8, start, length), replace);
     }
@@ -122,8 +137,7 @@ public class Text implements Writable {
             throws CharacterCodingException {
         CharsetDecoder decoder = DECODER_FACTORY.get();
         if (replace) {
-            decoder.onMalformedInput(
-                    java.nio.charset.CodingErrorAction.REPLACE);
+            decoder.onMalformedInput(java.nio.charset.CodingErrorAction.REPLACE);
             decoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
         }
         String str = decoder.decode(utf8).toString();
@@ -136,28 +150,31 @@ public class Text implements Writable {
     }
 
     /**
-     * Converts the provided String to bytes using the
-     * UTF-8 encoding. If the input is malformed,
-     * invalid chars are replaced by a default value.
+     * Converts the provided String to bytes using the UTF-8 encoding. If the
+     * input is malformed, invalid chars are replaced by a default value.
      *
-     * @return ByteBuffer: bytes stores at ByteBuffer.array()
-     * and length is ByteBuffer.limit()
+     * @param string string to encode
+     * @return ByteBuffer: bytes are stored at ByteBuffer.array() and the
+     *         length is ByteBuffer.limit()
+     * @throws CharacterCodingException if conversion failed
      */
-
     public static ByteBuffer encode(String string)
             throws CharacterCodingException {
         return encode(string, true);
     }
 
     /**
-     * Converts the provided String to bytes using the
-     * UTF-8 encoding. If <code>replace</code> is true, then
-     * malformed input is replaced with the
-     * substitution character, which is U+FFFD. Otherwise the
-     * method throws a MalformedInputException.
+     * Converts the provided String to bytes using the UTF-8 encoding. If
+     * <code>replace</code> is true, then malformed input is replaced with the
+     * substitution character, which is U+FFFD. Otherwise the method throws a
+     * MalformedInputException.
      *
-     * @return ByteBuffer: bytes stores at ByteBuffer.array()
-     * and length is ByteBuffer.limit()
+     * @param string string to encode
+     * @param replace whether to replace malformed input with the substitution
+     *            character
+     * @return ByteBuffer: bytes are stored at ByteBuffer.array() and the
+     *         length is ByteBuffer.limit()
+     * @throws MalformedInputException if the input is malformed and
+     *             <code>replace</code> is false
+     * @throws CharacterCodingException if the conversion failed
      */
     public static ByteBuffer encode(String string, boolean replace)
             throws CharacterCodingException {
@@ -166,8 +183,7 @@ public class Text implements Writable {
             encoder.onMalformedInput(CodingErrorAction.REPLACE);
             encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
         }
-        ByteBuffer bytes =
-                encoder.encode(CharBuffer.wrap(string.toCharArray()));
+        ByteBuffer bytes = encoder.encode(CharBuffer.wrap(string.toCharArray()));
         if (replace) {
             encoder.onMalformedInput(CodingErrorAction.REPORT);
             encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
@@ -178,6 +194,8 @@ public class Text implements Writable {
     /**
      * Returns the raw bytes; however, only data up to {@link #getLength()} is
      * valid.
+     *
+     * @return the raw bytes of the underlying byte array
      */
     public byte[] getBytes() {
         return bytes;
@@ -185,13 +203,17 @@ public class Text implements Writable {
 
     /**
      * Returns the number of bytes in the byte array
+     *
+     * @return number of bytes in the byte array
      */
     public int getLength() {
         return length;
     }
 
     /**
-     * Set to contain the contents of a string.
+     * Sets to contain the contents of a string.
+     *
+     * @param string input string
      */
     public void set(String string) {
         try {
@@ -199,30 +221,35 @@ public class Text implements Writable {
             bytes = bb.array();
             length = bb.limit();
         } catch (CharacterCodingException e) {
-            throw new RuntimeException("Should not have happened " + e.toString());
+            throw new RuntimeException("Should not have happened "
+                    + e.toString());
         }
     }
 
     /**
-     * Set to a utf8 byte array
+     * Sets to a UTF-8 byte array.
+     *
+     * @param utf8 input UTF-8 byte array
      */
     public void set(byte[] utf8) {
         set(utf8, 0, utf8.length);
     }
 
     /**
-     * copy a text.
+     * Copies a text.
+     *
+     * @param other text object to copy
      */
     public void set(Text other) {
         set(other.getBytes(), 0, other.getLength());
     }
 
     /**
-     * Set the Text to range of bytes
+     * Sets the Text to a range of bytes.
      *
-     * @param utf8  the data to copy from
+     * @param utf8 the data to copy from
      * @param start the first position of the new string
-     * @param len   the number of bytes of the new string
+     * @param len the number of bytes of the new string
      */
     public void set(byte[] utf8, int start, int len) {
         setCapacity(len, false);
@@ -231,11 +258,11 @@ public class Text implements Writable {
     }
 
     /**
-     * Append a range of bytes to the end of the given text
+     * Appends a range of bytes to the end of the given text.
      *
-     * @param utf8  the data to copy from
+     * @param utf8 the data to copy from
      * @param start the first position to append from utf8
-     * @param len   the number of bytes to append
+     * @param len the number of bytes to append
      */
     public void append(byte[] utf8, int start, int len) {
         setCapacity(length + len, true);
@@ -244,7 +271,7 @@ public class Text implements Writable {
     }
 
     /**
-     * Clear the string to empty.
+     * Clears the string to empty.
      */
     public void clear() {
         length = 0;
@@ -252,12 +279,13 @@ public class Text implements Writable {
 
     /*
      * Sets the capacity of this Text object to <em>at least</em>
-     * <code>len</code> bytes. If the current buffer is longer,
-     * then the capacity and existing content of the buffer are
-     * unchanged. If <code>len</code> is larger
-     * than the current capacity, the Text object's capacity is
-     * increased to match.
+     * <code>len</code> bytes. If the current buffer is longer, then the
+     * capacity and existing content of the buffer are unchanged. If
+     * <code>len</code> is larger than the current capacity, the Text object's
+     * capacity is increased to match.
+     *
      * @param len the number of bytes we need
+     *
      * @param keepData should the old data be kept
      */
     private void setCapacity(int len, boolean keepData) {
@@ -275,11 +303,13 @@ public class Text implements Writable {
      *
      * @see java.lang.Object#toString()
      */
+    @Override
     public String toString() {
         try {
             return decode(bytes, 0, length);
         } catch (CharacterCodingException e) {
-            throw new RuntimeException("Should not have happened " + e.toString());
+            throw new RuntimeException("Should not have happened "
+                    + e.toString());
         }
     }
 

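A short sketch of the Text API whose javadoc is cleaned up above. The sample
values are illustrative and checked-exception handling is omitted:

    // Build a Text, append more UTF-8 bytes, read it back.
    Text t = new Text("hello");
    byte[] more = ", world".getBytes("UTF-8");
    t.append(more, 0, more.length);
    String s = t.toString();                    // "hello, world"

    // decode(): replace=true substitutes U+FFFD for malformed input;
    // replace=false makes the decoder throw MalformedInputException.
    byte[] bad = new byte[] { (byte) 0xC3 };    // truncated UTF-8 sequence
    String lenient = Text.decode(bad, 0, bad.length, true);   // "\uFFFD"
    // Text.decode(bad, 0, bad.length, false) would throw instead.
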
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Writable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Writable.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Writable.java
index 8a550cf..3bb3d50 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Writable.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/io/Writable.java
@@ -4,7 +4,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-
 /**
  * A serializable object which implements a simple, efficient, serialization
  * protocol, based on {@link DataInput} and {@link DataOutput}.
@@ -15,7 +14,7 @@ public interface Writable {
      * Serialize the fields of this object to <code>out</code>.
      *
      * @param out <code>DataOutput</code> to serialize this object into.
-     * @throws IOException
+     * @throws IOException if an I/O error occurs
      */
     void write(DataOutput out) throws IOException;
 
@@ -25,7 +24,7 @@ public interface Writable {
      * existing object where possible.</p>
      *
      * @param in <code>DataInput</code> to deserialize this object from.
-     * @throws IOException
+     * @throws IOException if an I/O error occurs
      */
     void readFields(DataInput in) throws IOException;
 }

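A minimal implementation sketch of this interface; IntPair is a hypothetical
type, not part of PXF:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    // Serializes two ints in declaration order; readFields restores them
    // from the same stream layout.
    public class IntPair implements Writable {
        private int first;
        private int second;

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeInt(first);
            out.writeInt(second);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            first = in.readInt();
            second = in.readInt();
        }
    }
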
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/InvalidPathResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/InvalidPathResource.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/InvalidPathResource.java
index 50c2936..8a32b01 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/InvalidPathResource.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/InvalidPathResource.java
@@ -25,7 +25,7 @@ class Version {
  * as it is used to verify proper load of the PXF webapp.
  *
  * For each path, the version is compared to the current version PXF_VERSION.
- * The expected format of a path is "http://<host>:<port>/pxf/<version>/<rest of path>
+ * The expected format of a path is "{@code http://<host>:<port>/pxf/<version>/<rest of path>}"
  *
  * The returned value is always a Server Error code (500).
  * If the version is different than the current version, an appropriate error is returned with version details.

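The version check described here could look roughly as follows. This is a
hypothetical sketch, not the resource's actual code; 'requestPath' and the
error text are illustrative, while Version.PXF_PROTOCOL_VERSION and the
JAX-RS Response builder do exist:

    // Expected path: http://<host>:<port>/pxf/<version>/<rest of path>.
    // Any request landing here gets a 500; the message depends on whether
    // the version segment matches the current protocol version.
    String version = requestPath.split("/")[0];   // hypothetical extraction
    if (!version.equals(Version.PXF_PROTOCOL_VERSION)) {
        return Response.serverError()
                .entity("Wrong version " + version + ", supported version is "
                        + Version.PXF_PROTOCOL_VERSION)
                .build();
    }
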
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/MetadataResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/MetadataResource.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/MetadataResource.java
index 368a7a5..0e5252f 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/MetadataResource.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/MetadataResource.java
@@ -24,9 +24,10 @@ import com.pivotal.pxf.service.MetadataResponseFormatter;
 /**
  * Class enhances the API of the WEBHDFS REST server.
  * Returns the metadata of a given hcatalog table.
- * Example for querying API FRAGMENTER from a web client
- * curl -i "http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1"
- * /pxf/ is made part of the path when there is a webapp by that name in tcServer.
+ * <br>
+ * Example for querying the Metadata API from a web client:<br>
+ * <code>curl -i "http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1"</code><br>
+ * /pxf/ is made part of the path when there is a webapp by that name in Tomcat.
  */
 @Path("/" + Version.PXF_PROTOCOL_VERSION + "/Metadata/")
 public class MetadataResource extends RestResource {
@@ -40,14 +41,21 @@ public class MetadataResource extends RestResource {
      * This function queries the HiveMetaStore to get the given table's metadata:
      * Table name, field names, field types.
      * The types are converted from HCatalog types to HAWQ types.
-     * Supported HCatalog types: 
-     * TINYINT, SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, 
-     * STRING, BINARY, TIMESTAMP, DATE, DECIMAL, VARCHAR, CHAR. 
+     * Supported HCatalog types:
+     * TINYINT, SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE,
+     * STRING, BINARY, TIMESTAMP, DATE, DECIMAL, VARCHAR, CHAR.
+     * <br>
      * Unsupported types result in an error.
-     * 
-     * Response Examples:
-     * For a table default.t1 with 2 fields (a int, b float) will be returned as:
-     *      {"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}
+     * <br>
+     * Response example:<br>
+     * A table <code>default.t1</code> with 2 fields (a int, b float) will be returned as:
+     *      <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
+     *
+     * @param servletContext servlet context
+     * @param headers HTTP headers
+     * @param table HCatalog table name
+     * @return JSON formatted response with metadata for the given table
+     * @throws Exception if connecting to HCatalog fails, the table does not exist, or its type or fields are not supported
      */
     @GET
     @Path("getTableMetadata")
@@ -58,18 +66,18 @@ public class MetadataResource extends RestResource {
         Log.debug("getTableMetadata started");
         String jsonOutput;
         try {
-        	// 1. start MetadataFetcher 
-        	MetadataFetcher metadataFetcher = 
+        	// 1. start MetadataFetcher
+        	MetadataFetcher metadataFetcher =
         	        MetadataFetcherFactory.create("com.pivotal.pxf.plugins.hive.HiveMetadataFetcher"); //TODO: nhorn - 09-03-15 - pass as param
-        	
+
         	// 2. get Metadata
         	Metadata metadata = metadataFetcher.getTableMetadata(table);
-        	
+
         	// 3. serialize to JSON
         	jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
-        	
+
             Log.debug("getTableMetadata output: " + jsonOutput);
-        
+
         } catch (ClientAbortException e) {
             Log.error("Remote connection closed by HAWQ", e);
             throw e;

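Stripped of the REST wiring, the three numbered steps in the try block above
reduce to the following (table name taken from the javadoc example; exception
handling omitted):

    // 1. start a MetadataFetcher, 2. fetch the table metadata,
    // 3. serialize it to JSON.
    MetadataFetcher metadataFetcher = MetadataFetcherFactory.create(
            "com.pivotal.pxf.plugins.hive.HiveMetadataFetcher");
    Metadata metadata = metadataFetcher.getTableMetadata("default.t1");
    String jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
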
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/RestResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/RestResource.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/RestResource.java
index fb62927..a77c967 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/RestResource.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/rest/RestResource.java
@@ -21,10 +21,11 @@ public abstract class RestResource {
     /**
      * Converts the request headers multivalued map to a case-insensitive regular map
      * by taking only first values and storing them in a CASE_INSENSITIVE_ORDER TreeMap.
+     * All values are converted from ISO_8859_1 (ISO-LATIN-1) to UTF_8.
      *
      * @param requestHeaders request headers multi map.
      * @return a regular case-insensitive map.
-     * @throws UnsupportedEncodingException
+     * @throws UnsupportedEncodingException if the charsets ISO_8859_1 or UTF_8 are not supported
      */
     public Map<String, String> convertToCaseInsensitiveMap(MultivaluedMap<String, String> requestHeaders)
             throws UnsupportedEncodingException {

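A rough sketch of the conversion this javadoc describes. The loop body is an
assumption; only the CASE_INSENSITIVE_ORDER TreeMap, the first-value rule and
the ISO_8859_1-to-UTF_8 conversion are stated in the javadoc (needs
java.util.{Map,TreeMap,List} and javax.ws.rs.core.MultivaluedMap):

    Map<String, String> result =
            new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
    for (Map.Entry<String, List<String>> entry : requestHeaders.entrySet()) {
        String value = entry.getValue().get(0);   // first value only
        // re-interpret the ISO-8859-1 wire bytes as UTF-8
        result.put(entry.getKey(),
                new String(value.getBytes("ISO-8859-1"), "UTF-8"));
    }
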
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dc115ff4/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java
index 3c62cfc..cc78026 100644
--- a/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java
+++ b/pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java
@@ -18,22 +18,22 @@ import org.apache.juli.logging.LogFactory;
 /**
  * A WebappLoader that allows a customized classpath to be added through configuration in context xml.
  * Any additional classpath entry will be added to the default webapp classpath.
- * <p/>
+ *
  * <pre>
- * &lt;Context>
+ * &lt;Context&gt;
  *   &lt;Loader className="com.pivotal.pxf.service.utilities.CustomWebappLoader"
- *              classpathFiles="/somedir/classpathFile1;/somedir/classpathFile2"/>
- * &lt;/Context>
+ *              classpathFiles="/somedir/classpathFile1;/somedir/classpathFile2"/&gt;
+ * &lt;/Context&gt;
  * </pre>
  */
 public class CustomWebappLoader extends WebappLoader {
-	
+
 	/**
-	 * Because this class belongs in tcServer itself, logs go into tcServer's log facility that is separate 
+	 * Because this class belongs in tcServer itself, logs go into tcServer's log facility that is separate
 	 * from the web app's log facility.
-	 * 
+	 *
 	 * Logs are directed to catalina.log file. By default only INFO or higher messages are logged.
-	 * To change log level, add the following line to {catalina.base}/conf/logging.properties  
+	 * To change log level, add the following line to {catalina.base}/conf/logging.properties
 	 * <code>com.pivotal.pxf.level = FINE/INFO/WARNING</code> (FINE = debug).
 	 */
 	private static final Log LOG = LogFactory.getLog(CustomWebappLoader.class);
@@ -46,9 +46,9 @@ public class CustomWebappLoader extends WebappLoader {
 	 * anotherdir/somejar.jar
 	 * anotherone/hadoop*.jar
 	 * anotherone/pxf*[0-9].jar
-	 * Unix wildcard convention can be used to match a number of files 
+	 * Unix wildcard convention can be used to match a number of files
 	 * (e.g. <code>*</code>, <code>[0-9]</code>, <code>?</code>), but not a number of directories.
-	 * 
+	 *
 	 * The files specified under classpathFiles must exist - if they can't be read an exception will be thrown.
 	 */
 	private String classpathFiles;
@@ -101,35 +101,35 @@ public class CustomWebappLoader extends WebappLoader {
 
 		addRepositories(classpathFiles, true);
 		addRepositories(secondaryClasspathFiles, false);
-		
+
 		super.startInternal();
 	}
 
 	private void addRepositories(String classpathFiles, boolean throwException) throws LifecycleException {
-		
+
 		for (String classpathFile : classpathFiles.split(";")) {
-			
+
 			String classpath = readClasspathFile(classpathFile, throwException);
 			if (classpath == null) {
-				continue;	
+				continue;
 			}
-			
+
 			ArrayList<String> classpathEntries = trimEntries(classpath.split("[\\r\\n]+"));
 			LOG.info("Classpath file " + classpathFile + " has " + classpathEntries.size() + " entries");
-			
+
 			for (String entry : classpathEntries) {
 				LOG.debug("Trying to load entry " + entry);
 				int repositoriesCount = 0;
 				Path pathEntry = Paths.get(entry);
 				/*
-				 * For each entry, we look at the parent directory and try to match each of the files 
+				 * For each entry, we look at the parent directory and try to match each of the files
 				 * inside it to the file name or pattern in the file name (the last part of the path).
 				 * E.g., for path '/some/path/with/pattern*', the parent directory will be '/some/path/with/'
-				 * and the file name will be 'pattern*'. Each file under that directory matching 
-				 * this pattern will be added to the class loader repository. 
+				 * and the file name will be 'pattern*'. Each file under that directory matching
+				 * this pattern will be added to the class loader repository.
 				 */
 				try (DirectoryStream<Path> repositories = Files.newDirectoryStream(pathEntry.getParent(),
-						pathEntry.getFileName().toString())) { 
+						pathEntry.getFileName().toString())) {
 					for (Path repository : repositories) {
 						if (addPathToRepository(repository, entry)) {
 							repositoriesCount++;
@@ -137,7 +137,7 @@ public class CustomWebappLoader extends WebappLoader {
 					}
 				} catch (IOException e) {
 					LOG.warn("Failed to load entry " + entry + ": " + e);
-				} 
+				}
 				if (repositoriesCount == 0) {
 					LOG.warn("Entry " + entry + " doesn't match any files");
 				}
@@ -166,17 +166,17 @@ public class CustomWebappLoader extends WebappLoader {
 	 * @return valid entries
 	 */
 	private ArrayList<String> trimEntries(String[] classpathEntries) {
-		
+
 		ArrayList<String> trimmed = new ArrayList<String>();
 		int line = 0;
 		for (String entry : classpathEntries) {
-			
+
 			line++;
 			if (entry == null) {
 				LOG.debug("Skipping entry #" + line + " (null)");
 				continue;
 			}
-			
+
 			entry = entry.trim();
 			if (entry.isEmpty() || entry.startsWith("#")) {
 				LOG.debug("Skipping entry #" + line + " (" + entry + ")");
@@ -186,9 +186,9 @@ public class CustomWebappLoader extends WebappLoader {
 		}
 		return trimmed;
 	}
-	
+
 	private boolean addPathToRepository(Path path, String entry) {
-		
+
 		try {
 			URI pathUri = path.toUri();
 			String pathUriStr = pathUri.toString();
@@ -205,7 +205,7 @@ public class CustomWebappLoader extends WebappLoader {
 
 		return false;
 	}
-	
+
 }
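The wildcard matching described in the comment above can be exercised on its
own; the path is the illustrative one from the comment (needs
java.nio.file.{Path,Paths,Files,DirectoryStream} and java.io.IOException):

    // For '/some/path/with/pattern*': the parent is '/some/path/with/' and
    // the glob is 'pattern*'; each matching file would become a repository.
    Path pathEntry = Paths.get("/some/path/with/pattern*");
    try (DirectoryStream<Path> repositories = Files.newDirectoryStream(
            pathEntry.getParent(), pathEntry.getFileName().toString())) {
        for (Path repository : repositories) {
            System.out.println("matched: " + repository.toUri());
        }
    } catch (IOException e) {
        System.out.println("Failed to load entry: " + e);
    }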