Posted to commits@hawq.apache.org by nh...@apache.org on 2016/01/11 22:58:55 UTC

[1/2] incubator-hawq git commit: HAWQ-165. Change PXF logger to be private static final

Repository: incubator-hawq
Updated Branches:
  refs/heads/master e48a07b0d -> 127cac3e2


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/Text.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/Text.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/Text.java
index 82804ab..253b525 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/Text.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/Text.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.io;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.io;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -190,7 +189,8 @@ public class Text implements Writable {
      * MalformedInputException.
      *
      * @param string string to encode
-     * @param replace whether to replace malformed input with substitution character
+     * @param replace whether to replace malformed input with substitution
+     *            character
      * @return ByteBuffer: bytes stored at ByteBuffer.array() and length is
      *         ByteBuffer.limit()
      * @throws MalformedInputException if a malformed input is used

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
index 56f10b8..3a062c3 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.rest;
  * under the License.
  */
 
-
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -55,7 +54,7 @@ import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
 @Path("/" + Version.PXF_PROTOCOL_VERSION + "/Bridge/")
 public class BridgeResource extends RestResource {
 
-    private static Log Log = LogFactory.getLog(BridgeResource.class);
+    private static final Log LOG = LogFactory.getLog(BridgeResource.class);
     /**
      * Lock is needed here in the case of a non-thread-safe plugin. Using
      * synchronized methods is not enough because the bridge work is called by
@@ -91,7 +90,7 @@ public class BridgeResource extends RestResource {
         // Convert headers into a regular map
         Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
 
-        Log.debug("started with parameters: " + params);
+        LOG.debug("started with parameters: " + params);
 
         ProtocolData protData = new ProtocolData(params);
         SecuredHDFS.verifyToken(protData, servletContext);
@@ -105,7 +104,7 @@ public class BridgeResource extends RestResource {
         String dataDir = protData.getDataSource();
         // THREAD-SAFE parameter has precedence
         boolean isThreadSafe = protData.isThreadSafe() && bridge.isThreadSafe();
-        Log.debug("Request for " + dataDir + " will be handled "
+        LOG.debug("Request for " + dataDir + " will be handled "
                 + (isThreadSafe ? "without" : "with") + " synchronization");
 
         return readResponse(bridge, protData, isThreadSafe);
@@ -136,24 +135,24 @@ public class BridgeResource extends RestResource {
 
                     Writable record;
                     DataOutputStream dos = new DataOutputStream(out);
-                    Log.debug("Starting streaming fragment " + fragment
+                    LOG.debug("Starting streaming fragment " + fragment
                             + " of resource " + dataDir);
                     while ((record = bridge.getNext()) != null) {
                         record.write(dos);
                         ++recordCount;
                     }
-                    Log.debug("Finished streaming fragment " + fragment
+                    LOG.debug("Finished streaming fragment " + fragment
                             + " of resource " + dataDir + ", " + recordCount
                             + " records.");
                 } catch (ClientAbortException e) {
                    // Occurs whenever client (HAWQ) decides to end the
                     // connection
-                    Log.error("Remote connection closed by HAWQ", e);
+                    LOG.error("Remote connection closed by HAWQ", e);
                 } catch (Exception e) {
-                    Log.error("Exception thrown when streaming", e);
+                    LOG.error("Exception thrown when streaming", e);
                     throw new IOException(e.getMessage());
                 } finally {
-                    Log.debug("Stopped streaming fragment " + fragment
+                    LOG.debug("Stopped streaming fragment " + fragment
                             + " of resource " + dataDir + ", " + recordCount
                             + " records.");
                     if (!threadSafe) {
@@ -172,9 +171,9 @@ public class BridgeResource extends RestResource {
      * @param path path for the request, used for logging.
      */
     private void lock(String path) {
-        Log.trace("Locking BridgeResource for " + path);
+        LOG.trace("Locking BridgeResource for " + path);
         BRIDGE_LOCK.lock();
-        Log.trace("Locked BridgeResource for " + path);
+        LOG.trace("Locked BridgeResource for " + path);
     }
 
     /**
@@ -183,8 +182,8 @@ public class BridgeResource extends RestResource {
      * @param path path for the request, used for logging.
      */
     private void unlock(String path) {
-        Log.trace("Unlocking BridgeResource for " + path);
+        LOG.trace("Unlocking BridgeResource for " + path);
         BRIDGE_LOCK.unlock();
-        Log.trace("Unlocked BridgeResource for " + path);
+        LOG.trace("Unlocked BridgeResource for " + path);
     }
 }
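
A note on the synchronization above: when the plugin is not thread-safe, the whole streaming loop runs under a single lock. A minimal sketch of the pattern, assuming BRIDGE_LOCK is a java.util.concurrent.locks.ReentrantLock (its declaration is not shown in these hunks):

    private static final ReentrantLock BRIDGE_LOCK = new ReentrantLock();

    // readResponse(), simplified: serialize access only when required
    if (!threadSafe) {
        lock(dataDir);
    }
    try {
        // stream records from bridge.getNext() to the output stream
    } finally {
        if (!threadSafe) {
            unlock(dataDir);
        }
    }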

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
index f05d200..d877590 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.rest;
  * under the License.
  */
 
-
 import org.apache.catalina.connector.ClientAbortException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -35,75 +34,84 @@ import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.io.IOException;
 
-/*
+/**
  * Class enhances the API of the HBASE rest server.
  * Example for querying API getClusterNodesInfo from a web client
- * curl "http://localhost:50070/pxf/v2/HadoopCluster/getNodesInfo"
+ * <code>curl "http://localhost:51200/pxf/v2/HadoopCluster/getNodesInfo"</code>
  * /pxf/ is made part of the path when there is a webapp by that name in tcServer.
  */
 @Path("/" + Version.PXF_PROTOCOL_VERSION + "/HadoopCluster/")
 public class ClusterNodesResource {
-    private Log Log;
+    private static final Log LOG = LogFactory.getLog(ClusterNodesResource.class);
 
-    public ClusterNodesResource() throws IOException {
-        Log = LogFactory.getLog(ClusterNodesResource.class);
+    public ClusterNodesResource() {
     }
 
-
-    /*
-     * Function queries the Hadoop namenode with the getDataNodeStats API
-     * It gets the host's IP and REST port of every HDFS data node in the
-     * cluster. Then, it packs the results in JSON format and writes to the
-     * HTTP response stream.
-     * Response Examples:
-     * a. When there are no datanodes - getDataNodeStats returns an empty array
-     *    {"regions":[]}
-     * b. When there are datanodes
-     *    {"regions":[{"host":"1.2.3.1","port":50075},{"host":"1.2.3.2","port":50075}]}
+    /**
+     * The function queries the Hadoop namenode with the getDataNodeStats API. It
+     * gets the host's IP and REST port of every HDFS data node in the cluster.
+     * Then, it packs the results in JSON format and writes to the HTTP response
+     * stream. Response Examples:<br>
+     * <ol>
+     * <li>When there are no datanodes - getDataNodeStats returns an empty array
+     * <code>{"regions":[]}</code></li>
+     * <li>When there are datanodes
+     * <code>{"regions":[{"host":"1.2.3.1","port":50075},{"host":"1.2.3.2","port"
+     * :50075}]}</code></li>
+     * </ol>
+     *
+     * @return JSON response with nodes info
+     * @throws Exception if failed to retrieve info
      */
     @GET
     @Path("getNodesInfo")
     @Produces("application/json")
     public Response read() throws Exception {
-        Log.debug("getNodesInfo started");
+        LOG.debug("getNodesInfo started");
         StringBuilder jsonOutput = new StringBuilder("{\"regions\":[");
         try {
-            /* 1. Initialize the HADOOP client side API for a distributed file system */
+            /*
+             * 1. Initialize the HADOOP client side API for a distributed file
+             * system
+             */
             Configuration conf = new Configuration();
             FileSystem fs = FileSystem.get(conf);
             DistributedFileSystem dfs = (DistributedFileSystem) fs;
 
-			/* 2. Query the namenode for the datanodes info.  
-			 *    Only live nodes are returned  - in accordance with the results returned by 
-			 *    org.apache.hadoop.hdfs.tools.DFSAdmin#report().
-			 */
+            /*
+             * 2. Query the namenode for the datanodes info. Only live nodes are
+             * returned - in accordance with the results returned by
+             * org.apache.hadoop.hdfs.tools.DFSAdmin#report().
+             */
             DatanodeInfo[] liveNodes = dfs.getDataNodeStats(DatanodeReportType.LIVE);
 
-			/* 3. Pack the datanodes info in a JSON text format and write it 
-             *    to the HTTP output stream.
-			 */
+            /*
+             * 3. Pack the datanodes info in a JSON text format and write it to
+             * the HTTP output stream.
+             */
             String prefix = "";
             for (DatanodeInfo node : liveNodes) {
                 verifyNode(node);
-                jsonOutput.append(prefix).append(writeNode(node)); // write one node to the HTTP stream
+                // write one node to the HTTP stream
+                jsonOutput.append(prefix).append(writeNode(node));
                 prefix = ",";
             }
             jsonOutput.append("]}");
-            Log.debug("getNodesCluster output: " + jsonOutput);
+            LOG.debug("getNodesCluster output: " + jsonOutput);
         } catch (NodeDataException e) {
-            Log.error("Nodes verification failed", e);
+            LOG.error("Nodes verification failed", e);
             throw e;
         } catch (ClientAbortException e) {
-            Log.error("Remote connection closed by HAWQ", e);
+            LOG.error("Remote connection closed by HAWQ", e);
             throw e;
         } catch (java.io.IOException e) {
-            Log.error("Unhandled exception thrown", e);
+            LOG.error("Unhandled exception thrown", e);
             throw e;
         }
 
-        return Response.ok(jsonOutput.toString(), MediaType.APPLICATION_JSON_TYPE).build();
+        return Response.ok(jsonOutput.toString(),
+                MediaType.APPLICATION_JSON_TYPE).build();
     }
 
     private class NodeDataException extends java.io.IOException {
@@ -123,15 +131,18 @@ public class ClusterNodesResource {
         String ip = node.getIpAddr();
 
         if (StringUtils.isEmpty(ip)) {
-            throw new NodeDataException("Invalid IP: " + ip + " (Node " + node + ")");
+            throw new NodeDataException("Invalid IP: " + ip + " (Node " + node
+                    + ")");
         }
 
         if (port <= 0) {
-            throw new NodeDataException("Invalid port: " + port + " (Node " + node + ")");
+            throw new NodeDataException("Invalid port: " + port + " (Node "
+                    + node + ")");
         }
     }
 
     String writeNode(DatanodeInfo node) throws java.io.IOException {
-        return "{\"host\":\"" + node.getIpAddr() + "\",\"port\":" + node.getInfoPort() + "}";
+        return "{\"host\":\"" + node.getIpAddr() + "\",\"port\":"
+                + node.getInfoPort() + "}";
     }
 }
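
The read() method above boils down to three HDFS client calls; a self-contained sketch using the same APIs (Hadoop 2.x package layout assumed, cluster settings taken from the default Configuration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;

    public class ListLiveDataNodes {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());
            DistributedFileSystem dfs = (DistributedFileSystem) fs;
            // live nodes only, matching DFSAdmin#report()
            for (DatanodeInfo node : dfs.getDataNodeStats(DatanodeReportType.LIVE)) {
                System.out.println("{\"host\":\"" + node.getIpAddr()
                        + "\",\"port\":" + node.getInfoPort() + "}");
            }
        }
    }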

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
index 567c71d..aeddb04 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.rest;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.Fragment;
 import org.apache.hawq.pxf.api.Fragmenter;
 import org.apache.hawq.pxf.api.FragmentsStats;
@@ -56,7 +55,7 @@ import java.util.Map;
  */
 @Path("/" + Version.PXF_PROTOCOL_VERSION + "/Fragmenter/")
 public class FragmenterResource extends RestResource {
-    private static Log Log = LogFactory.getLog(FragmenterResource.class);
+    private static final Log LOG = LogFactory.getLog(FragmenterResource.class);
 
     /**
      * The function is called when
@@ -119,8 +118,8 @@ public class FragmenterResource extends RestResource {
 
         FragmentsStats fragmentsStats = fragmenter.getFragmentsStats();
         String response = FragmentsStats.dataToJSON(fragmentsStats);
-        if (Log.isDebugEnabled()) {
-            Log.debug(FragmentsStats.dataToString(fragmentsStats, path));
+        if (LOG.isDebugEnabled()) {
+            LOG.debug(FragmentsStats.dataToString(fragmentsStats, path));
         }
 
         return Response.ok(response, MediaType.APPLICATION_JSON_TYPE).build();
@@ -130,14 +129,14 @@ public class FragmenterResource extends RestResource {
                                          final HttpHeaders headers,
                                          final String path) throws Exception {
 
-        if (Log.isDebugEnabled()) {
+        if (LOG.isDebugEnabled()) {
             StringBuilder startMsg = new StringBuilder(
                     "FRAGMENTER started for path \"" + path + "\"");
             for (String header : headers.getRequestHeaders().keySet()) {
                 startMsg.append(" Header: ").append(header).append(" Value: ").append(
                         headers.getRequestHeader(header));
             }
-            Log.debug(startMsg);
+            LOG.debug(startMsg);
         }
 
         /* Convert headers into a case-insensitive regular map */
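
The isDebugEnabled() check above is the usual commons-logging guard: it keeps the StringBuilder work from running when debug output is off. The idiom in isolation (buildStartMessage() is a hypothetical stand-in for the loop above):

    if (LOG.isDebugEnabled()) {
        // message construction happens only when it will actually be logged
        LOG.debug(buildStartMessage(path, headers));
    }
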

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
index d3da48e..8e987f3 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
@@ -56,7 +56,7 @@ public class InvalidPathResource {
     @Context
     UriInfo rootUri;
 
-    private static final Log Log = LogFactory.getLog(InvalidPathResource.class);
+    private static final Log LOG = LogFactory.getLog(InvalidPathResource.class);
     // Set of retired endpoints
     private final ImmutableSet<String> retiredEndPoints = ImmutableSet.of(
             "Analyzer");
@@ -127,7 +127,7 @@ public class InvalidPathResource {
         String version = pathSegments.get(0).getPath();
         String endPoint = (pathSegments.size() > 1) ? pathSegments.get(1).getPath() : null;
 
-        Log.debug("REST request: " + rootUri.getAbsolutePath() + ". " +
+        LOG.debug("REST request: " + rootUri.getAbsolutePath() + ". " +
                 "Version " + version + ", supported version is " + Version.PXF_PROTOCOL_VERSION);
 
         if(version.equals(Version.PXF_PROTOCOL_VERSION)) { // api with correct version but incorrect path

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
index d6595eb..4c0f2bf 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.rest;
  * under the License.
  */
 
-
 import java.io.IOException;
 
 import javax.servlet.ServletContext;
@@ -42,67 +41,72 @@ import org.apache.hawq.pxf.service.MetadataFetcherFactory;
 import org.apache.hawq.pxf.service.MetadataResponseFormatter;
 
 /**
- * Class enhances the API of the WEBHDFS REST server.
- * Returns the metadata of a given hcatalog table.
- * <br>
+ * Class enhances the API of the WEBHDFS REST server. Returns the metadata of a
+ * given HCatalog table. <br>
  * Example for querying API FRAGMENTER from a web client:<br>
- * <code>curl -i "http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1"</code><br>
+ * <code>curl -i "http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1"</code>
+ * <br>
  * /pxf/ is made part of the path when there is a webapp by that name in tomcat.
  */
 @Path("/" + Version.PXF_PROTOCOL_VERSION + "/Metadata/")
 public class MetadataResource extends RestResource {
-    private Log Log;
+    private static final Log LOG = LogFactory.getLog(MetadataResource.class);
 
     public MetadataResource() throws IOException {
-        Log = LogFactory.getLog(MetadataResource.class);
     }
 
     /**
-     * This function queries the HiveMetaStore to get the given table's metadata:
-     * Table name, field names, field types.
-     * The types are converted from HCatalog types to HAWQ types.
-     * Supported HCatalog types:
-     * TINYINT, SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE,
-     * STRING, BINARY, TIMESTAMP, DATE, DECIMAL, VARCHAR, CHAR.
-     * <br>
-     * Unsupported types result in an error.
-     * <br>
+     * This function queries the HiveMetaStore to get the given table's
+     * metadata: Table name, field names, field types. The types are converted
+     * from HCatalog types to HAWQ types. Supported HCatalog types: TINYINT,
+     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, TIMESTAMP,
+     * DATE, DECIMAL, VARCHAR, CHAR. <br>
+     * Unsupported types result in an error. <br>
      * Response Examples:<br>
-     * For a table <code>default.t1</code> with 2 fields (a int, b float) will be returned as:
-     *      <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
+     * A table <code>default.t1</code> with 2 fields (a int, b float) will
+     * be returned as:
+     * <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
      *
      * @param servletContext servlet context
      * @param headers http headers
      * @param table HCatalog table name
      * @return JSON formatted response with metadata for given table
-     * @throws Exception if connection to Hcatalog failed, table didn't exist or its type or fields are not supported
+     * @throws Exception if connection to HCatalog failed, table didn't exist or
+     *             its type or fields are not supported
      */
     @GET
     @Path("getTableMetadata")
     @Produces("application/json")
     public Response read(@Context final ServletContext servletContext,
-            			 @Context final HttpHeaders headers,
-            			 @QueryParam("table") final String table) throws Exception {
-        Log.debug("getTableMetadata started");
+                         @Context final HttpHeaders headers,
+                         @QueryParam("table") final String table)
+            throws Exception {
+        LOG.debug("getTableMetadata started");
         String jsonOutput;
         try {
-        	// 1. start MetadataFetcher
-        	MetadataFetcher metadataFetcher =
-        	        MetadataFetcherFactory.create("org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher"); //TODO: nhorn - 09-03-15 - pass as param
+            // 1. start MetadataFetcher
+            // TODO: nhorn - 09-03-15 - pass as param
+            MetadataFetcher metadataFetcher = MetadataFetcherFactory.create("org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher");
 
-        	// 2. get Metadata
-        	Metadata metadata = metadataFetcher.getTableMetadata(table);
+            // 2. get Metadata
+            Metadata metadata = metadataFetcher.getTableMetadata(table);
 
-        	// 3. serialize to JSON
-        	jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
+            // 3. serialize to JSON
+            jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
 
-            Log.debug("getTableMetadata output: " + jsonOutput);
+            LOG.debug("getTableMetadata output: " + jsonOutput);
 
         } catch (ClientAbortException e) {
-            Log.error("Remote connection closed by HAWQ", e);
+            LOG.error("Remote connection closed by HAWQ", e);
             throw e;
         } catch (java.io.IOException e) {
-            Log.error("Unhandled exception thrown", e);
+            LOG.error("Unhandled exception thrown", e);
             throw e;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
index c609abb..60bb31e 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.rest;
  * under the License.
  */
 
-
 import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.commons.codec.CharEncoding;
@@ -36,20 +35,23 @@ import java.util.TreeMap;
  */
 public abstract class RestResource {
 
-    private static Log Log = LogFactory.getLog(RestResource.class);
+    private static final Log LOG = LogFactory.getLog(RestResource.class);
 
     /**
-     * Converts the request headers multivalued map to a case-insensitive regular map
-     * by taking only first values and storing them in a CASE_INSENSITIVE_ORDER TreeMap.
-     * All values are converted from ISO_8859_1 (ISO-LATIN-1) to UTF_8.
+     * Converts the request headers multivalued map to a case-insensitive
+     * regular map by taking only first values and storing them in a
+     * CASE_INSENSITIVE_ORDER TreeMap. All values are converted from ISO_8859_1
+     * (ISO-LATIN-1) to UTF_8.
      *
      * @param requestHeaders request headers multi map.
      * @return a regular case-insensitive map.
-     * @throws UnsupportedEncodingException if the named charsets ISO_8859_1 and UTF_8 are not supported
+     * @throws UnsupportedEncodingException if the named charsets ISO_8859_1 and
+     *             UTF_8 are not supported
      */
     public Map<String, String> convertToCaseInsensitiveMap(MultivaluedMap<String, String> requestHeaders)
             throws UnsupportedEncodingException {
-        Map<String, String> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+        Map<String, String> result = new TreeMap<>(
+                String.CASE_INSENSITIVE_ORDER);
         for (Map.Entry<String, List<String>> entry : requestHeaders.entrySet()) {
             String key = entry.getKey();
             List<String> values = entry.getValue();
@@ -57,8 +59,9 @@ public abstract class RestResource {
                 String value = values.get(0);
                 if (value != null) {
                     // converting to value UTF-8 encoding
-                    value = new String(value.getBytes(CharEncoding.ISO_8859_1), CharEncoding.UTF_8);
-                    Log.trace("key: " + key + ". value: " + value);
+                    value = new String(value.getBytes(CharEncoding.ISO_8859_1),
+                            CharEncoding.UTF_8);
+                    LOG.trace("key: " + key + ". value: " + value);
                     result.put(key, value.replace("\\\"", "\""));
                 }
             }
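
The conversion above re-decodes each header value because servlet containers hand header bytes over as ISO-8859-1. The same step in isolation, using java.nio.charset instead of the commons-codec CharEncoding constants (a sketch, not the class's actual helper):

    import java.nio.charset.StandardCharsets;

    static String toUtf8(String isoLatin1Value) {
        // reinterpret the ISO-8859-1 bytes as UTF-8
        return new String(isoLatin1Value.getBytes(StandardCharsets.ISO_8859_1),
                StandardCharsets.UTF_8);
    }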

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
index e0ea850..f7b897a 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.rest;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -29,26 +29,26 @@ import javax.servlet.ServletContextEvent;
 import org.apache.hawq.pxf.service.utilities.Log4jConfigure;
 import org.apache.hawq.pxf.service.utilities.SecureLogin;
 
-/** 
+/**
  * Listener on lifecycle events of our webapp
  */
 public class ServletLifecycleListener implements ServletContextListener {
 
-    private static Log LOG = LogFactory.getLog(ServletContextListener.class);
+    private static final Log LOG = LogFactory.getLog(ServletContextListener.class);
 
 	/**
 	 * Called after the webapp has been initialized.
-	 * 
+	 *
 	 * 1. Initializes log4j.
 	 * 2. Initiates a Kerberos login when Hadoop security is on.
 	 */
 	@Override
-	public void contextInitialized(ServletContextEvent event) {	
+	public void contextInitialized(ServletContextEvent event) {
 		// 1. Initialize log4j:
 		Log4jConfigure.configure(event);
-		
+
 		LOG.info("webapp initialized");
-		
+
 		// 2. Initiate secure login
 		SecureLogin.login();
 	}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/VersionResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/VersionResource.java
index f30e5e9..6f326d6 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/VersionResource.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/VersionResource.java
@@ -47,7 +47,7 @@ class Version {
 @Path("/ProtocolVersion")
 public class VersionResource {
 
-    private static final Log Log = LogFactory.getLog(VersionResource.class);
+    private static final Log LOG = LogFactory.getLog(VersionResource.class);
 
     public VersionResource() {
     }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/AnalyzeUtils.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/AnalyzeUtils.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/AnalyzeUtils.java
index 67efeb4..21172c5 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/AnalyzeUtils.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/AnalyzeUtils.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.utilities;
  * under the License.
  */
 
-
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
@@ -34,7 +33,7 @@ import org.apache.hawq.pxf.api.Fragment;
  */
 public class AnalyzeUtils {
 
-    private static Log Log = LogFactory.getLog(AnalyzeUtils.class);
+    private static final Log LOG = LogFactory.getLog(AnalyzeUtils.class);
 
     /**
      * In case pxf_max_fragments parameter is declared, make sure not to get
@@ -57,7 +56,7 @@ public class AnalyzeUtils {
             return fragments;
         }
 
-        Log.debug("fragments list has " + listSize
+        LOG.debug("fragments list has " + listSize
                 + " fragments, maxFragments = " + maxSize);
 
         bitSet = generateSamplingBitSet(listSize, maxSize);
@@ -88,7 +87,7 @@ public class AnalyzeUtils {
         }
 
         if (sampleSize >= poolSize) {
-            Log.debug("sampling bit map has " + poolSize + " elements (100%)");
+            LOG.debug("sampling bit map has " + poolSize + " elements (100%)");
             bitSet.set(0, poolSize);
             return bitSet;
         }
@@ -116,7 +115,7 @@ public class AnalyzeUtils {
             }
         }
 
-        Log.debug("sampling bit map has " + chosen + " elements:"
+        LOG.debug("sampling bit map has " + chosen + " elements:"
                 + bitSet.toString());
 
         return bitSet;
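
The hunks above show the sampling bit set being generated but not applied; a plausible completion of the filtering step, assuming the surrounding method returns the sampled list (names follow the code above):

    List<Fragment> sampled = new ArrayList<>();
    for (int i = 0; i < fragments.size(); i++) {
        if (bitSet.get(i)) {
            sampled.add(fragments.get(i));  // keep only the chosen fragments
        }
    }
    return sampled;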

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
index ecd51d9..c2ccd20 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.utilities;
  * under the License.
  */
 
-
 import java.io.File;
 
 import javax.servlet.ServletContext;
@@ -31,35 +30,37 @@ import org.apache.log4j.PropertyConfigurator;
 
 public class Log4jConfigure {
 
-    private static Log LOG = LogFactory.getLog(Log4jConfigure.class);
-	
+    private static final Log LOG = LogFactory.getLog(Log4jConfigure.class);
+
     /**
      * Initializes log4j logging for the webapp.
-     * 
-     * Reads log4j properties file location from log4jConfigLocation parameter in web.xml.
-     * When not using aboslute path, the path starts from the webapp root directory.
-	 * If the file can't be read, reverts to default configuration file under
-	 * WEB-INF/classes/pxf-log4j.properties.
-	 * 
+     *
+     * Reads log4j properties file location from log4jConfigLocation parameter
+     * in web.xml. When not using an absolute path, the path starts from the webapp
+     * root directory. If the file can't be read, reverts to the default
+     * configuration file under WEB-INF/classes/pxf-log4j.properties.
+     *
      * @param event Servlet context, used to determine webapp root directory.
      */
-	public static void configure(ServletContextEvent event) {
-		
-		final String defaultLog4jLocation = "WEB-INF/classes/pxf-log4j.properties";
+    public static void configure(ServletContextEvent event) {
 
-		ServletContext context = event.getServletContext();
+        final String defaultLog4jLocation = "WEB-INF/classes/pxf-log4j.properties";
+
+        ServletContext context = event.getServletContext();
         String log4jConfigLocation = context.getInitParameter("log4jConfigLocation");
 
         if (!log4jConfigLocation.startsWith(File.separator)) {
-        	log4jConfigLocation = context.getRealPath("") + File.separator + log4jConfigLocation;
+            log4jConfigLocation = context.getRealPath("") + File.separator
+                    + log4jConfigLocation;
         }
-        
+
         // revert to default properties file if file doesn't exist
-        File log4jConfigFile = new File (log4jConfigLocation);
+        File log4jConfigFile = new File(log4jConfigLocation);
         if (!log4jConfigFile.canRead()) {
-        	log4jConfigLocation = context.getRealPath("") + File.separator + defaultLog4jLocation;
+            log4jConfigLocation = context.getRealPath("") + File.separator
+                    + defaultLog4jLocation;
         }
-		PropertyConfigurator.configure(log4jConfigLocation); 
+        PropertyConfigurator.configure(log4jConfigLocation);
         LOG.info("log4jConfigLocation = " + log4jConfigLocation);
-	}
+    }
 }
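
configure() reads its location from the log4jConfigLocation servlet init parameter; a hypothetical web.xml fragment showing the parameter the code expects (the value here is illustrative):

    <context-param>
        <param-name>log4jConfigLocation</param-name>
        <param-value>WEB-INF/classes/pxf-log4j.properties</param-value>
    </context-param>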

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
index 20fce7b..6ce05ed 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,42 +19,43 @@ package org.apache.hawq.pxf.service.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.SecurityUtil;
 
-/*
+/**
  * This class relies heavily on Hadoop API to
- * - Check need for secure login in Hadoop
- * - Parse and load .xml configuration file
- * - Do a Kerberos login with a kaytab file
- * - convert _HOST in Kerberos principal to current hostname
+ * <ul>
+ * <li>Check need for secure login in Hadoop</li>
+ * <li>Parse and load .xml configuration file</li>
+ * <li>Do a Kerberos login with a keytab file</li>
+ * <li>Convert _HOST in Kerberos principal to current hostname</li>
+ * </ul>
  *
- * It uses Hadoop Configuration to parse XML configuration files
- * It uses Hadoop Security to modify principal and perform the login
+ * It uses Hadoop Configuration to parse XML configuration files.<br>
+ * It uses Hadoop Security to modify principal and perform the login.
  *
- * The major limitation in this class is its dependency
- * on Hadoop. If Hadoop security is off, no login will be performed
- * regardless of connector being used.
+ * The major limitation in this class is its dependency on Hadoop. If Hadoop
+ * security is off, no login will be performed regardless of connector being
+ * used.
  */
 public class SecureLogin {
-    private static Log LOG = LogFactory.getLog(SecureLogin.class);
-	private static final String CONFIG_KEY_SERVICE_KEYTAB = "pxf.service.kerberos.keytab";
-	private static final String CONFIG_KEY_SERVICE_PRINCIPAL = "pxf.service.kerberos.principal";
+    private static final Log LOG = LogFactory.getLog(SecureLogin.class);
+    private static final String CONFIG_KEY_SERVICE_KEYTAB = "pxf.service.kerberos.keytab";
+    private static final String CONFIG_KEY_SERVICE_PRINCIPAL = "pxf.service.kerberos.principal";
 
-	public static void login() {
-		try {
-			Configuration config = new Configuration();
-			config.addResource("pxf-site.xml");
+    public static void login() {
+        try {
+            Configuration config = new Configuration();
+            config.addResource("pxf-site.xml");
 
-			SecurityUtil.login(config, CONFIG_KEY_SERVICE_KEYTAB, CONFIG_KEY_SERVICE_PRINCIPAL);
-		} catch (Exception e)
-		{
-			LOG.error("PXF service login failed");
-			throw new RuntimeException(e);
-		}
-	}
+            SecurityUtil.login(config, CONFIG_KEY_SERVICE_KEYTAB,
+                    CONFIG_KEY_SERVICE_PRINCIPAL);
+        } catch (Exception e) {
+            LOG.error("PXF service login failed");
+            throw new RuntimeException(e);
+        }
+    }
 }
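
SecurityUtil.login() resolves the keytab and principal through the two configuration keys defined above; a hypothetical pxf-site.xml fragment (values are illustrative; _HOST is replaced with the local hostname, as the class doc notes):

    <property>
        <name>pxf.service.kerberos.principal</name>
        <value>pxf/_HOST@EXAMPLE.COM</value>
    </property>
    <property>
        <name>pxf.service.kerberos.keytab</name>
        <value>/etc/security/keytabs/pxf.service.keytab</value>
    </property>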

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
index 6917bac..f442a6d 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -34,19 +33,21 @@ import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
 
-/*
- * The class handles security functions for handling
- * secured HDFS
+/**
+ * The class handles security functions for handling secured HDFS
  */
 public class SecuredHDFS {
     private static final Log LOG = LogFactory.getLog(SecuredHDFS.class);
 
-    /*
+    /**
      * The function will get the token information from parameters and call
      * SecuredHDFS to verify the token.
      *
      * All token properties will be deserialized from string to a Token object
      *
+     * @param protData input parameters
+     * @param context servlet context which contains the NN address
+     *
      * @throws SecurityException Thrown when authentication fails
      */
     public static void verifyToken(ProtocolData protData, ServletContext context) {
@@ -65,16 +66,16 @@ public class SecuredHDFS {
         }
     }
 
-    /*
+    /**
      * The function will verify the token with NameNode if available and will
      * create a UserGroupInformation.
      *
      * Code in this function is copied from JspHelper.getTokenUGI
      *
      * @param identifier Delegation token identifier
-     *
      * @param password Delegation token password
-     *
+     * @param kind the kind of token
+     * @param service the service for this token
      * @param servletContext Jetty servlet context which contains the NN address
      *
      * @throws SecurityException Thrown when authentication fails
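
The deserialization the Javadoc describes maps naturally onto Hadoop's Token API; a hedged sketch (the request parameter and the private verifyToken overload are assumed from the @param list above):

    Token<DelegationTokenIdentifier> token = new Token<>();
    token.decodeFromUrlString(tokenFromRequest);  // hypothetical variable
    verifyToken(token.getIdentifier(), token.getPassword(),
            token.getKind(), token.getService(), servletContext);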

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
index 372bcc8..8467734 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service.utilities;
  * under the License.
  */
 
-
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 
@@ -138,11 +137,10 @@ public class Utilities {
     }
 
     /**
-     * Replaces any non-alpha-numeric character with a '.'.
-     * This measure is used to prevent cross-site scripting (XSS)
-     * when an input string might include code or script. By removing
-     * all special characters and returning a censured string to the user
-     * this threat is avoided.
+     * Replaces any non-alpha-numeric character with a '.'. This measure is used
+     * to prevent cross-site scripting (XSS) when an input string might include
+     * code or script. By removing all special characters and returning a
+     * censored string to the user, this threat is avoided.
      *
      * @param input string to be masked
      * @return masked string

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
index 098f29c..dcb89b8 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.io;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -46,12 +46,12 @@ public class GPDBWritableTest {
 
     DataInput inputStream;
     OngoingStubbing<Integer> ongoing;
-    Log Log;
+    Log LOG;
 
     @Before
-    public void SetupStaticLog() {
-        Log = mock(Log.class);
-        Whitebox.setInternalState(GPDBWritable.class, Log);
+    public void setupStaticLog() {
+        LOG = mock(Log.class);
+        Whitebox.setInternalState(GPDBWritable.class, LOG);
     }
 
     /*
@@ -147,6 +147,6 @@ public class GPDBWritableTest {
     }
 
     private void verifyLog(String msg) {
-        Mockito.verify(Log).debug(msg);
+        Mockito.verify(LOG).debug(msg);
     }
 }


[2/2] incubator-hawq git commit: HAWQ-165. Change PXF logger to be private static final

Posted by nh...@apache.org.
HAWQ-165. Change PXF logger to be private static final

Includes many minor indentation and documentation fixes.
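
The recurring change, as it appears in the hunks of both messages:

    // before: mutable field shadowing the Log type name
    private static Log Log = LogFactory.getLog(BridgeResource.class);

    // after: an immutable, conventionally named constant
    private static final Log LOG = LogFactory.getLog(BridgeResource.class);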


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/127cac3e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/127cac3e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/127cac3e

Branch: refs/heads/master
Commit: 127cac3e23fb502bfedab5cf0910564636c284bd
Parents: e48a07b
Author: Noa Horn <nh...@pivotal.io>
Authored: Mon Jan 11 13:01:37 2016 -0800
Committer: Noa Horn <nh...@pivotal.io>
Committed: Mon Jan 11 13:01:37 2016 -0800

----------------------------------------------------------------------
 .../hawq-hadoop/hawq-mapreduce-tool/.gitignore  |   1 +
 .../org/apache/hawq/pxf/api/FragmentsStats.java |  10 +-
 .../hawq/pxf/api/utilities/ProfilesConf.java    |  17 +--
 .../apache/hawq/pxf/api/FragmentsStatsTest.java |   4 +-
 .../pxf/api/utilities/ProfilesConfTest.java     | 142 +++++++++++++------
 .../hbase/utilities/HBaseLookupTable.java       |  62 ++++----
 .../pxf/plugins/hdfs/LineBreakAccessor.java     |  10 +-
 .../pxf/plugins/hdfs/SequenceFileAccessor.java  |  14 +-
 .../plugins/hdfs/utilities/HdfsUtilities.java   |  10 +-
 .../hdfs/utilities/RecordkeyAdapter.java        | 123 ++++++++--------
 .../plugins/hdfs/StringPassResolverTest.java    | 111 ++++++++-------
 .../hdfs/utilities/RecordkeyAdapterTest.java    |  20 +--
 .../hawq/pxf/plugins/hive/HiveAccessor.java     |   5 +-
 .../pxf/plugins/hive/HiveDataFragmenter.java    |  11 +-
 .../hawq/pxf/plugins/hive/HiveResolver.java     |  17 +--
 .../hawq/pxf/service/BridgeOutputBuilder.java   |  19 ++-
 .../hawq/pxf/service/FragmentsResponse.java     |  30 ++--
 .../pxf/service/FragmentsResponseFormatter.java |  49 ++++---
 .../pxf/service/MetadataResponseFormatter.java  |   8 +-
 .../org/apache/hawq/pxf/service/ReadBridge.java |  15 +-
 .../hawq/pxf/service/ReadSamplingBridge.java    |   9 +-
 .../hawq/pxf/service/io/GPDBWritable.java       |  10 +-
 .../org/apache/hawq/pxf/service/io/Text.java    |   8 +-
 .../hawq/pxf/service/rest/BridgeResource.java   |  29 ++--
 .../pxf/service/rest/ClusterNodesResource.java  |  87 +++++++-----
 .../pxf/service/rest/FragmenterResource.java    |  15 +-
 .../pxf/service/rest/InvalidPathResource.java   |   4 +-
 .../hawq/pxf/service/rest/MetadataResource.java |  72 +++++-----
 .../hawq/pxf/service/rest/RestResource.java     |  25 ++--
 .../service/rest/ServletLifecycleListener.java  |  16 +--
 .../hawq/pxf/service/rest/VersionResource.java  |   2 +-
 .../pxf/service/utilities/AnalyzeUtils.java     |  13 +-
 .../pxf/service/utilities/Log4jConfigure.java   |  43 +++---
 .../hawq/pxf/service/utilities/SecureLogin.java |  55 +++----
 .../hawq/pxf/service/utilities/SecuredHDFS.java |  21 +--
 .../hawq/pxf/service/utilities/Utilities.java   |  10 +-
 .../hawq/pxf/service/io/GPDBWritableTest.java   |  14 +-
 37 files changed, 602 insertions(+), 509 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore b/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
index 314002f..9e2a5d7 100644
--- a/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
+++ b/contrib/hawq-hadoop/hawq-mapreduce-tool/.gitignore
@@ -1,2 +1,3 @@
 target/
 test-data/*/output
+lib/

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
index fad1a2d..425922c 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FragmentsStats.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.api;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -37,7 +36,7 @@ public class FragmentsStats {
      */
     public static final long DEFAULT_FRAGMENT_SIZE = 67108864L;
 
-    private static Log Log = LogFactory.getLog(FragmentsStats.class);
+    private static final Log LOG = LogFactory.getLog(FragmentsStats.class);
 
     // number of fragments
     private long fragmentsNumber;
@@ -139,8 +138,7 @@ public class FragmentsStats {
     /**
      * Given a {@link FragmentsStats}, serialize it in JSON to be used as the
      * result string for HAWQ. An example result is as follows:
-     * <code>{"PXFFragmentsStats":{"fragmentsNumber"
-     * :3,"firstFragmentSize":67108864,"totalSize":200000000}}</code>
+     * <code>{"PXFFragmentsStats":{"fragmentsNumber":3,"firstFragmentSize":{"size"=67108864,"unit":"B"},"totalSize":{"size"=200000000,"unit"="B"}}}</code>
      *
      * @param stats the data to be serialized
      * @return the result in json format

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
index 607f28b..2c20ab7 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ProfilesConf.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -41,8 +41,9 @@ import static org.apache.hawq.pxf.api.utilities.ProfileConfException.MessageForm
  * It exposes a public static method getProfilePluginsMap(String plugin) which returns the requested profile plugins
  */
 public enum ProfilesConf {
-    INSTANCE;
-    private Log log = LogFactory.getLog(ProfilesConf.class);
+    INSTANCE; // enum singleton
+    // not necessary to declare LOG as static final, because this is a singleton
+    private Log LOG = LogFactory.getLog(ProfilesConf.class);
     private Map<String, Map<String, String>> profilesMap;
     private final static String EXTERNAL_PROFILES = "pxf-profiles.xml";
     private final static String INTERNAL_PROFILES = "pxf-profiles-default.xml";
@@ -59,7 +60,7 @@ public enum ProfilesConf {
         if (profilesMap.isEmpty()) {
             throw new ProfileConfException(PROFILES_FILE_NOT_FOUND, EXTERNAL_PROFILES);
         }
-        log.info("PXF profiles loaded: " + profilesMap.keySet());
+        LOG.info("PXF profiles loaded: " + profilesMap.keySet());
     }
 
     /**
@@ -88,7 +89,7 @@ public enum ProfilesConf {
     private void loadConf(String fileName, boolean isMandatory) {
         URL url = getClassLoader().getResource(fileName);
         if (url == null) {
-            log.warn(fileName + " not found in the classpath");
+            LOG.warn(fileName + " not found in the classpath");
             if (isMandatory) {
                 throw new ProfileConfException(PROFILES_FILE_NOT_FOUND, fileName);
             }
@@ -105,14 +106,14 @@ public enum ProfilesConf {
     private void loadMap(XMLConfiguration conf) {
         String[] profileNames = conf.getStringArray("profile.name");
         if (profileNames.length == 0) {
-            log.warn("Profile file: " + conf.getFileName() + " is empty");
+            LOG.warn("Profile file: " + conf.getFileName() + " is empty");
             return;
         }
         Map<String, Map<String, String>> profileMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
         for (int profileIdx = 0; profileIdx < profileNames.length; profileIdx++) {
             String profileName = profileNames[profileIdx];
             if (profileMap.containsKey(profileName)) {
-                log.warn("Duplicate profile definition found in " + conf.getFileName() + " for: " + profileName);
+                LOG.warn("Duplicate profile definition found in " + conf.getFileName() + " for: " + profileName);
                 continue;
             }
             Configuration profileSubset = conf.subset("profile(" + profileIdx + ").plugins");

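ProfilesConf keeps its logger as an instance field because the enum-singleton pattern already guarantees a single instance. A minimal sketch of that pattern, with illustrative names:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public enum SingletonConfSketch {
    INSTANCE; // the JVM creates exactly one instance, lazily and thread-safely

    // Only one copy of this field can ever exist, so static final would be
    // redundant here; an instance field reads naturally inside an enum.
    private final Log log = LogFactory.getLog(SingletonConfSketch.class);

    public void load() {
        log.info("configuration loaded");
    }
}

// Usage: SingletonConfSketch.INSTANCE.load();
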
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
index e139ce6..6248f9d 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FragmentsStatsTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
index 495e48c..0631cb2 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/utilities/ProfilesConfTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.api.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.api.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.Before;
@@ -44,14 +43,18 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 /**
- * Base test class for all ProfilesConf tests.
- * Each test case is encapsulated inside its own inner class to force reloading of ProfilesConf enum singleton
+ * Base test class for all ProfilesConf tests. Each test case is encapsulated
+ * inside its own inner class to force reloading of the ProfilesConf enum
+ * singleton.
  */
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({ProfilesConf.class, Log.class, LogFactory.class, ClassLoader.class})
+@PrepareForTest({
+        ProfilesConf.class,
+        Log.class,
+        LogFactory.class,
+        ClassLoader.class })
 public class ProfilesConfTest {
     static ClassLoader classLoader;
-    static Log log;
+    static Log LOG;
     String mandatoryFileName = "mandatory.xml";
     String optionalFileName = "optional.xml";
     File mandatoryFile;
@@ -65,10 +68,12 @@ public class ProfilesConfTest {
         mandatoryFile = testFolder.newFile(mandatoryFileName);
         optionalFile = testFolder.newFile(optionalFileName);
         PowerMockito.mockStatic(LogFactory.class);
-        log = mock(Log.class);
-        when(LogFactory.getLog(ProfilesConf.class)).thenReturn(log);
+        LOG = mock(Log.class);
+        when(LogFactory.getLog(ProfilesConf.class)).thenReturn(LOG);
         classLoader = mock(ClassLoader.class);
-        PowerMockito.stub(PowerMockito.method(ProfilesConf.class, "getClassLoader")).toReturn(classLoader);
+        PowerMockito.stub(
+                PowerMockito.method(ProfilesConf.class, "getClassLoader")).toReturn(
+                classLoader);
     }
 
     void writeFile(File file, String content) throws IOException {
@@ -79,36 +84,53 @@ public class ProfilesConfTest {
 class ProfilesConfTestDefinedProfile extends ProfilesConfTest {
     @Test
     public void definedProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1><plugin2>XX</plugin2></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1><plugin2>XX</plugin2></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
 
         Map<String, String> hbaseProfile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals(2, hbaseProfile.keySet().size());
         assertEquals(hbaseProfile.get("X-GP-PLUGIN1"), "X");
         assertEquals(hbaseProfile.get("X-GP-PLUGIN2"), "XX");
 
-        Map<String, String> hiveProfile = ProfilesConf.getProfilePluginsMap("hIVe");// case insensitive profile name
+        Map<String, String> hiveProfile = ProfilesConf.getProfilePluginsMap("hIVe");// case
+                                                                                    // insensitive
+                                                                                    // profile
+                                                                                    // name
         assertEquals(1, hiveProfile.keySet().size());
         assertEquals(hiveProfile.get("X-GP-PLUGIN1"), "Y");
 
-        Mockito.verify(log).info("PXF profiles loaded: [HBase, Hive]");
+        Mockito.verify(LOG).info("PXF profiles loaded: [HBase, Hive]");
     }
 }
 
 class ProfilesConfTestUndefinedProfile extends ProfilesConfTest {
     @Test
     public void undefinedProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         try {
             ProfilesConf.getProfilePluginsMap("UndefinedProfile");
             fail("undefined profile should have thrown exception");
         } catch (ProfileConfException pce) {
-            assertEquals(pce.getMessage(), String.format(NO_PROFILE_DEF.getFormat(), "UndefinedProfile", "pxf-profiles.xml"));
+            assertEquals(pce.getMessage(), String.format(
+                    NO_PROFILE_DEF.getFormat(), "UndefinedProfile",
+                    "pxf-profiles.xml"));
         }
     }
 }
@@ -116,22 +138,36 @@ class ProfilesConfTestUndefinedProfile extends ProfilesConfTest {
 class ProfilesConfTestDuplicateProfileDefinition extends ProfilesConfTest {
     @Test
     public void duplicateProfileDefinition() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin1>YY</plugin1></plugins></profile><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin1>YY</plugin1></plugins></profile><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>Hive</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         ProfilesConf.getProfilePluginsMap("HBase");
-        Mockito.verify(log).warn("Duplicate profile definition found in " + mandatoryFileName + " for: HBase");
+        Mockito.verify(LOG).warn(
+                "Duplicate profile definition found in " + mandatoryFileName
+                        + " for: HBase");
     }
 }
 
 class ProfilesConfTestOverrideProfile extends ProfilesConfTest {
     @Test
     public void overrideProfile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin2>YY</plugin2></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>X</plugin1></plugins></profile></profiles>");
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1><plugin2>YY</plugin2></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         Map profile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals(2, profile.keySet().size());
         assertEquals(profile.get("X-GP-PLUGIN1"), "Y");
@@ -143,11 +179,16 @@ class ProfilesConfTestEmptyProfileFile extends ProfilesConfTest {
     @Test
     public void emptyProfileFile() throws Exception {
         writeFile(mandatoryFile, "<profiles/>");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         ProfilesConf.getProfilePluginsMap("HBase");
-        Mockito.verify(log).warn("Profile file: " + mandatoryFileName + " is empty");
+        Mockito.verify(LOG).warn(
+                "Profile file: " + mandatoryFileName + " is empty");
     }
 }
 
@@ -155,14 +196,20 @@ class ProfilesConfTestMalformedProfileFile extends ProfilesConfTest {
     @Test
     public void malformedProfileFile() throws Exception {
         writeFile(mandatoryFile, "I'm a malford x.m.l@#$#<%");
-        writeFile(optionalFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
-        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(optionalFile.toURI().toURL());
+        writeFile(
+                optionalFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
+        when(classLoader.getResource("pxf-profiles.xml")).thenReturn(
+                optionalFile.toURI().toURL());
         try {
             ProfilesConf.getProfilePluginsMap("HBase");
             fail("malformed profile file should have thrown exception");
         } catch (ExceptionInInitializerError pce) {
-            assertTrue(pce.getCause().getMessage().contains(mandatoryFileName + " could not be loaded: org.xml.sax.SAXParseException"));
+            assertTrue(pce.getCause().getMessage().contains(
+                    mandatoryFileName
+                            + " could not be loaded: org.xml.sax.SAXParseException"));
         }
     }
 }
@@ -170,13 +217,17 @@ class ProfilesConfTestMalformedProfileFile extends ProfilesConfTest {
 class ProfilesConfTestMissingMandatoryProfileFile extends ProfilesConfTest {
     @Test
     public void missingMandatoryProfileFile() throws Exception {
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(null);
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                null);
         try {
             ProfilesConf.getProfilePluginsMap("HBase");
             fail("missing mandatory profile file should have thrown exception");
         } catch (ExceptionInInitializerError pce) {
-            Mockito.verify(log).warn("pxf-profiles-default.xml not found in the classpath");
-            assertEquals(pce.getCause().getMessage(), String.format(PROFILES_FILE_NOT_FOUND.getFormat(), "pxf-profiles-default.xml"));
+            Mockito.verify(LOG).warn(
+                    "pxf-profiles-default.xml not found in the classpath");
+            assertEquals(pce.getCause().getMessage(), String.format(
+                    PROFILES_FILE_NOT_FOUND.getFormat(),
+                    "pxf-profiles-default.xml"));
         }
     }
 }
@@ -184,11 +235,14 @@ class ProfilesConfTestMissingMandatoryProfileFile extends ProfilesConfTest {
 class ProfilesConfTestMissingOptionalProfileFile extends ProfilesConfTest {
     @Test
     public void missingOptionalProfileFile() throws Exception {
-        writeFile(mandatoryFile, "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
-        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(mandatoryFile.toURI().toURL());
+        writeFile(
+                mandatoryFile,
+                "<profiles><profile><name>HBase</name><plugins><plugin1>Y</plugin1></plugins></profile></profiles>");
+        when(classLoader.getResource("pxf-profiles-default.xml")).thenReturn(
+                mandatoryFile.toURI().toURL());
         when(classLoader.getResource("pxf-profiles.xml")).thenReturn(null);
         Map<String, String> hbaseProfile = ProfilesConf.getProfilePluginsMap("HBase");
         assertEquals("Y", hbaseProfile.get("X-GP-PLUGIN1"));
-        Mockito.verify(log).warn("pxf-profiles.xml not found in the classpath");
+        Mockito.verify(LOG).warn("pxf-profiles.xml not found in the classpath");
     }
 }

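The tests above can only verify log output because the mock is installed before ProfilesConf first asks LogFactory for its logger. A condensed, self-contained sketch of that technique (SomeClassUnderTest is a hypothetical stand-in, not a class from the patch):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest({ LogVerificationSketch.SomeClassUnderTest.class, LogFactory.class })
public class LogVerificationSketch {

    // Hypothetical stand-in for a class like ProfilesConf.
    static class SomeClassUnderTest {
        private static final Log LOG = LogFactory.getLog(SomeClassUnderTest.class);

        static void doWork() {
            LOG.warn("file not found in the classpath");
        }
    }

    @Test
    public void warningIsLogged() {
        // Intercept the static factory before the class under test is
        // initialized, so the mock is what gets cached in its logger field.
        PowerMockito.mockStatic(LogFactory.class);
        Log log = mock(Log.class);
        when(LogFactory.getLog(SomeClassUnderTest.class)).thenReturn(log);

        SomeClassUnderTest.doWork();

        Mockito.verify(log).warn("file not found in the classpath");
    }
}
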
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
index f68c3c2..1515687 100644
--- a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
+++ b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/utilities/HBaseLookupTable.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hbase.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hbase.utilities;
  * under the License.
  */
 
-
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,19 +35,23 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * HBaseLookupTable will load a table's lookup information
- * from HBase pxflookup table if exists.<br>
- * This table holds mappings between HAWQ column names (key) and HBase column names (value).<br>
- * E.g. for an HBase table "hbase_table", mappings between HAWQ column names and HBase column names,
- * when <code>"hawq1"</code> is mapped to <code>"cf1:hbase1"</code> and
- * <code>"hawq2"</code> is mapped to <code>"cf1:hbase2"</code>, will be:<br>
+ * HBaseLookupTable will load a table's lookup information from the HBase
+ * pxflookup table if it exists.<br>
+ * This table holds mappings between HAWQ column names (key) and HBase column
+ * names (value).<br>
+ * E.g. for an HBase table "hbase_table", mappings between HAWQ column names and
+ * HBase column names, when <code>"hawq1"</code> is mapped to
+ * <code>"cf1:hbase1"</code> and <code>"hawq2"</code> is mapped to
+ * <code>"cf1:hbase2"</code>, will be:<br>
+ *
  * <pre>
  * 	ROW                     COLUMN+CELL
  *  hbase_table             column=mapping:hawq1, value=cf1:hbase1
  *  hbase_table             column=mapping:hawq2, value=cf1:hbase2
  * </pre>
  *
- * Data is returned as a map of string and byte array from {@link #getMappings(String)}.
+ * Data is returned as a map of string and byte array from
+ * {@link #getMappings(String)}.
  * <p>
  * Once created, {@link #close()} MUST be called to cleanup resources.
  */
@@ -65,8 +68,8 @@ public class HBaseLookupTable implements Closeable {
     private Table lookupTable;
 
     /**
-     * Constructs a connector to HBase lookup table.
-     * Requires calling {@link #close()} to close {@link HBaseAdmin} instance.
+     * Constructs a connector to HBase lookup table. Requires calling
+     * {@link #close()} to close {@link HBaseAdmin} instance.
      *
      * @param conf HBase configuration
      * @throws IOException when initializing HBaseAdmin fails
@@ -76,14 +79,14 @@ public class HBaseLookupTable implements Closeable {
         connection = ConnectionFactory.createConnection(hbaseConfiguration);
         admin = connection.getAdmin();
         ClusterStatus cs = admin.getClusterStatus();
-        LOG.debug("HBase cluster has " + cs.getServersSize() + " region servers " +
-                "(" + cs.getDeadServers() + " dead)");
+        LOG.debug("HBase cluster has " + cs.getServersSize()
+                + " region servers " + "(" + cs.getDeadServers() + " dead)");
     }
 
     /**
      * Returns mappings for given table name between its HAWQ column names and
-     * HBase column names.
-     * If lookup table doesn't exist or no mappings for the table exist, returns null.
+     * HBase column names. If lookup table doesn't exist or no mappings for the
+     * table exist, returns null.
      * <p>
      * All HAWQ column names are returned in lower case.
      *
@@ -119,12 +122,12 @@ public class HBaseLookupTable implements Closeable {
      * @return whether lookup table is valid
      */
     private boolean lookupTableValid() throws IOException {
-        return (HBaseUtilities.isTableAvailable(admin, LOOKUPTABLENAME) &&
-                lookupHasCorrectStructure());
+        return (HBaseUtilities.isTableAvailable(admin, LOOKUPTABLENAME) && lookupHasCorrectStructure());
     }
 
     /**
-     * Returns true if {@link #LOOKUPTABLENAME} has {@value #LOOKUPCOLUMNFAMILY} family.
+     * Returns true if {@link #LOOKUPTABLENAME} has {@value #LOOKUPCOLUMNFAMILY}
+     * family.
      *
      * @return whether lookup has expected column family name
      */
@@ -145,22 +148,21 @@ public class HBaseLookupTable implements Closeable {
     }
 
     /**
-     * Returns true if lookup table has no relevant mappings.
-     * Should be called after {@link #loadMappingMap(String)}.
+     * Returns true if lookup table has no relevant mappings. Should be called
+     * after {@link #loadMappingMap(String)}.
      */
     private boolean tableHasNoMappings() {
         return MapUtils.isEmpty(rawTableMapping);
     }
 
     /**
-     * Returns a map of mappings between HAWQ and HBase column names,
-     * with the HAWQ column values in lower case.
+     * Returns a map of mappings between HAWQ and HBase column names, with the
+     * HAWQ column values in lower case.
      */
     private Map<String, byte[]> lowerCaseMappings() {
         Map<String, byte[]> lowCaseKeys = new HashMap<String, byte[]>();
         for (Map.Entry<byte[], byte[]> entry : rawTableMapping.entrySet()) {
-            lowCaseKeys.put(lowerCase(entry.getKey()),
-                    entry.getValue());
+            lowCaseKeys.put(lowerCase(entry.getKey()), entry.getValue());
         }
 
         return lowCaseKeys;
@@ -174,8 +176,9 @@ public class HBaseLookupTable implements Closeable {
     }
 
     /**
-     * Loads mappings for given table name from the lookup table {@link #LOOKUPTABLENAME}.
-     * The table name should be in the row key, and the family name should be {@link #LOOKUPCOLUMNFAMILY}.
+     * Loads mappings for given table name from the lookup table
+     * {@link #LOOKUPTABLENAME}. The table name should be in the row key, and
+     * the family name should be {@link #LOOKUPCOLUMNFAMILY}.
      *
      * @param tableName HBase table name
      * @throws IOException when HBase operations fail
@@ -188,8 +191,9 @@ public class HBaseLookupTable implements Closeable {
 
         row = lookupTable.get(lookupRow);
         rawTableMapping = row.getFamilyMap(LOOKUPCOLUMNFAMILY);
-        LOG.debug("lookup table mapping for " + tableName +
-                " has " + (rawTableMapping == null ? 0 : rawTableMapping.size()) + " entries");
+        LOG.debug("lookup table mapping for " + tableName + " has "
+                + (rawTableMapping == null ? 0 : rawTableMapping.size())
+                + " entries");
     }
 
     private void closeLookupTable() throws IOException {

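A condensed sketch of what lowerCaseMappings() does with the family map HBase returns, assuming UTF-8 key encoding (an assumption for this sketch; the real code decodes keys via its own helper):

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class LowerCaseMappingSketch {

    // HBase hands back the "mapping" family as byte[] -> byte[]; HAWQ looks
    // columns up by lower-case String names, so the map is re-keyed.
    static Map<String, byte[]> lowerCaseKeys(Map<byte[], byte[]> rawMapping) {
        Map<String, byte[]> result = new HashMap<String, byte[]>();
        for (Map.Entry<byte[], byte[]> entry : rawMapping.entrySet()) {
            String hawqColumn = new String(entry.getKey(), StandardCharsets.UTF_8);
            result.put(hawqColumn.toLowerCase(), entry.getValue());
        }
        return result;
    }
}
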
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
index 92853a3..40ca2fa 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/LineBreakAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -46,7 +46,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
     private Configuration conf;
     private FileSystem fs;
     private Path file;
-    private static Log Log = LogFactory.getLog(LineBreakAccessor.class);
+    private static final Log LOG = LogFactory.getLog(LineBreakAccessor.class);
 
     /**
      * Constructs a LineReaderAccessor.
@@ -93,7 +93,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
         org.apache.hadoop.fs.Path parent = file.getParent();
         if (!fs.exists(parent)) {
             fs.mkdirs(parent);
-            Log.debug("Created new dir " + parent.toString());
+            LOG.debug("Created new dir " + parent.toString());
         }
 
         // create output stream - do not allow overwriting existing file
@@ -132,7 +132,7 @@ public class LineBreakAccessor extends HdfsSplittableDataAccessor implements
     @Override
     public void closeForWrite() throws Exception {
         if ((dos != null) && (fsdos != null)) {
-            Log.debug("Closing writing stream for path " + file);
+            LOG.debug("Closing writing stream for path " + file);
             dos.flush();
             /*
              * From release 0.21.0 sync() is deprecated in favor of hflush(),

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
index aef063b..a395d09 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/SequenceFileAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -56,7 +56,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
     private SequenceFile.Writer writer;
     private LongWritable defaultKey; // used when recordkey is not defined
 
-    private static Log Log = LogFactory.getLog(SequenceFileAccessor.class);;
+    private static final Log LOG = LogFactory.getLog(SequenceFileAccessor.class);
 
     /**
      * Constructs a SequenceFileAccessor.
@@ -99,7 +99,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         parent = file.getParent();
         if (!fs.exists(parent)) {
             fs.mkdirs(parent);
-            Log.debug("Created new dir " + parent);
+            LOG.debug("Created new dir " + parent);
         }
 
         writer = null;
@@ -136,7 +136,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
                         "Compression type must be defined");
             }
 
-            Log.debug("Compression ON: " + "compression codec: "
+            LOG.debug("Compression ON: " + "compression codec: "
                     + userCompressCodec + ", compression type: "
                     + compressionType);
         }
@@ -178,7 +178,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         if (codec != null) {
             fileName += codec.getDefaultExtension();
         }
-        Log.debug("File name for write: " + fileName);
+        LOG.debug("File name for write: " + fileName);
         return fileName;
     }
 
@@ -202,7 +202,7 @@ public class SequenceFileAccessor extends HdfsSplittableDataAccessor implements
         try {
             writer.append((key == null) ? defaultKey : key, value);
         } catch (IOException e) {
-            Log.error("Failed to write data to file: " + e.getMessage());
+            LOG.error("Failed to write data to file: " + e.getMessage());
             return false;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
index 68f0a94..aa8c4b4 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/HdfsUtilities.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -49,7 +49,7 @@ import java.util.List;
  * HdfsUtilities class exposes helper methods for PXF classes.
  */
 public class HdfsUtilities {
-    private static Log Log = LogFactory.getLog(HdfsUtilities.class);
+    private static final Log LOG = LogFactory.getLog(HdfsUtilities.class);
     private static Configuration config = new Configuration();
     private static CompressionCodecFactory factory = new CompressionCodecFactory(
             config);
@@ -107,7 +107,7 @@ public class HdfsUtilities {
         if (codec != null) {
             codecClass = codec.getClass();
         }
-        Log.debug((codecClass == null ? "No codec" : "Codec " + codecClass)
+        LOG.debug((codecClass == null ? "No codec" : "Codec " + codecClass)
                 + " was found for file " + path);
         return codecClass;
     }
@@ -191,7 +191,7 @@ public class HdfsUtilities {
             FileSplit fileSplit = new FileSplit(new Path(
                     inputData.getDataSource()), start, end, hosts);
 
-            Log.debug("parsed file split: path " + inputData.getDataSource()
+            LOG.debug("parsed file split: path " + inputData.getDataSource()
                     + ", start " + start + ", end " + end + ", hosts "
                     + ArrayUtils.toString(hosts));
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
index 1016f72..2c189a2 100644
--- a/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
+++ b/pxf/pxf-hdfs/src/main/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
@@ -31,16 +30,21 @@ import org.apache.hadoop.io.*;
 import java.util.List;
 
 /**
- * Adapter used for adding a recordkey field to the records output {@code List<OneField>}.
+ * Adapter used for adding a recordkey field to the records output
+ * {@code List<OneField>}.
  */
 public class RecordkeyAdapter {
-    private Log Log;
+
+    private static final Log LOG = LogFactory.getLog(RecordkeyAdapter.class);
 
     /*
-     * We need to transform Record keys to java primitive types.
-     * Since the type of the key is the same throughout the file we do the type resolution
-     * in the first call (for the first record) and then use a "Java variation on Function pointer"
-     * to do the extraction for the rest of the records.
+     * We need to transform Record keys to java primitive types. Since the type
+     * of the key is the same throughout the file we do the type resolution in
+     * the first call (for the first record) and then use a
+     * "Java variation on Function pointer" to do the extraction for the rest of
+     * the records.
      */
     private interface ValExtractor {
         public Object get(Object key);
@@ -58,58 +62,59 @@ public class RecordkeyAdapter {
      * Constructs a RecordkeyAdapter.
      */
     public RecordkeyAdapter() {
-        Log = LogFactory.getLog(RecordkeyAdapter.class);
     }
 
     /**
-     *  Adds the recordkey to the end of the passed in recFields list.
-     *  <p>
-     *  This method also verifies cases in which record keys are not supported
-     *  by the underlying source type, and therefore "illegally" requested.
+     * Adds the recordkey to the end of the passed in recFields list.
+     * <p>
+     * This method also verifies cases in which record keys are not supported by
+     * the underlying source type, and therefore "illegally" requested.
      *
-     * @param recFields existing list of record (non-key) fields and their values.
+     * @param recFields existing list of record (non-key) fields and their
+     *            values.
      * @param input all input parameters coming from the client request
-     * @param onerow a row object which is used here in order to find out if
-     *        the given type supports recordkeys or not.
+     * @param onerow a row object which is used here in order to find out if the
+     *            given type supports recordkeys or not.
      * @return 0 if record key not needed, or 1 if record key was appended
      * @throws NoSuchFieldException when the given record type does not support
-     *         recordkeys
+     *             recordkeys
      */
-    public int appendRecordkeyField(List<OneField> recFields,
-                                    InputData input,
+    public int appendRecordkeyField(List<OneField> recFields, InputData input,
                                     OneRow onerow) throws NoSuchFieldException {
 
-		/*
-		 * user did not request the recordkey field in the
-		 * "create external table" statement
-		 */
+        /*
+         * user did not request the recordkey field in the
+         * "create external table" statement
+         */
         ColumnDescriptor recordkeyColumn = input.getRecordkeyColumn();
         if (recordkeyColumn == null) {
             return 0;
         }
 
-		/*
-		 * The recordkey was filled in the fileAccessor during execution of
-		 * method readNextObject. The current accessor implementations are
-		 * SequenceFileAccessor, LineBreakAccessor and AvroFileAccessor from
-		 * HdfsSplittableDataAccessor and QuotedLineBreakAccessor from
-		 * HdfsAtomicDataAccessor. For SequenceFileAccessor, LineBreakAccessor
-		 * the recordkey is set, since it is returned by the
-		 * SequenceFileRecordReader or LineRecordReader(for text file). But Avro
-		 * files do not have keys, so the AvroRecordReader will not return a key
-		 * and in this case recordkey will be null. If the user specified a
-		 * recordkey attribute in the CREATE EXTERNAL TABLE statement and he
-		 * reads from an AvroFile, we will throw an exception since the Avro
-		 * file does not have keys In the future, additional implementations of
-		 * FileAccessors will have to set recordkey during readNextObject().
-		 * Otherwise it is null by default and we will throw an exception here,
-		 * that is if we get here... a careful user will not specify recordkey
-		 * in the CREATE EXTERNAL statement and then we will leave this function
-		 * one line above.
-		 */
+        /*
+         * The recordkey was filled in the fileAccessor during execution of
+         * method readNextObject. The current accessor implementations are
+         * SequenceFileAccessor, LineBreakAccessor and AvroFileAccessor from
+         * HdfsSplittableDataAccessor and QuotedLineBreakAccessor from
+         * HdfsAtomicDataAccessor. For SequenceFileAccessor, LineBreakAccessor
+         * the recordkey is set, since it is returned by the
+         * SequenceFileRecordReader or LineRecordReader(for text file). But Avro
+         * files do not have keys, so the AvroRecordReader will not return a key
+         * and in this case recordkey will be null. If the user specified a
+         * recordkey attribute in the CREATE EXTERNAL TABLE statement and
+         * reads from an Avro file, we will throw an exception since the Avro
+         * file does not have keys. In the future, additional implementations
+         * of
+         * FileAccessors will have to set recordkey during readNextObject().
+         * Otherwise it is null by default and we will throw an exception here,
+         * that is if we get here... a careful user will not specify recordkey
+         * in the CREATE EXTERNAL statement and then we will leave this function
+         * one line above.
+         */
         Object recordkey = onerow.getKey();
         if (recordkey == null) {
-            throw new NoSuchFieldException("Value for field \"recordkey\" was requested but the queried HDFS resource type does not support key");
+            throw new NoSuchFieldException(
+                    "Value for field \"recordkey\" was requested but the "
+                            + "queried HDFS resource type does not support key");
         }
 
         OneField oneField = new OneField();
@@ -120,11 +125,11 @@ public class RecordkeyAdapter {
     }
 
     /*
-	 * Extracts a java primitive type value from the recordkey. If the key is a
-	 * Writable implementation we extract the value as a Java primitive. If the
-	 * key is already a Java primitive we returned it as is If it is an unknown
-	 * type we throw an exception
-	 */
+     * Extracts a java primitive type value from the recordkey. If the key is a
+     * Writable implementation we extract the value as a Java primitive. If the
+     * key is already a Java primitive we return it as is. If it is an unknown
+     * type we throw an exception.
+     */
     private Object extractVal(Object key) {
         if (extractor == null) {
             extractor = InitializeExtractor(key);
@@ -197,17 +202,19 @@ public class RecordkeyAdapter {
             return new ValExtractor() {
                 @Override
                 public Object get(Object key) {
-                    throw new UnsupportedOperationException("Unsupported recordkey data type " + key.getClass().getName());
+                    throw new UnsupportedOperationException(
+                            "Unsupported recordkey data type "
+                                    + key.getClass().getName());
                 }
             };
         }
     }
 
     /**
-     * Converts given key object to its matching Writable.
-     * Supported types: Integer, Byte, Boolean, Double, Float, Long, String.
-     * The type is only checked once based on the key, all consequent calls
-     * must be of the same type.
+     * Converts given key object to its matching Writable. Supported types:
+     * Integer, Byte, Boolean, Double, Float, Long, String. The type is only
+     * checked once based on the key, all consequent calls must be of the same
+     * type.
      *
      * @param key object to convert
      * @return Writable object matching given key
@@ -215,8 +222,8 @@ public class RecordkeyAdapter {
     public Writable convertKeyValue(Object key) {
         if (converter == null) {
             converter = initializeConverter(key);
-            Log.debug("converter initialized for type " + key.getClass() +
-                    " (key value: " + key + ")");
+            LOG.debug("converter initialized for type " + key.getClass()
+                    + " (key value: " + key + ")");
         }
 
         return converter.get(key);
@@ -277,7 +284,9 @@ public class RecordkeyAdapter {
             return new ValConverter() {
                 @Override
                 public Writable get(Object key) {
-                    throw new UnsupportedOperationException("Unsupported recordkey data type " + key.getClass().getName());
+                    throw new UnsupportedOperationException(
+                            "Unsupported recordkey data type "
+                                    + key.getClass().getName());
                 }
             };
         }

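The ValExtractor comment above describes a lazy, resolve-once dispatch: the key type is inspected only for the first record, and the chosen extractor is reused for the rest of the file. A condensed sketch of the pattern, covering a single Writable type:

import org.apache.hadoop.io.IntWritable;

public class ExtractorDispatchSketch {

    interface ValExtractor {
        Object get(Object key);
    }

    private ValExtractor extractor;

    Object extractVal(Object key) {
        // Resolve the key type once; every later call reuses the result.
        if (extractor == null) {
            extractor = initializeExtractor(key);
        }
        return extractor.get(key);
    }

    private ValExtractor initializeExtractor(Object key) {
        if (key instanceof IntWritable) {
            return new ValExtractor() {
                @Override
                public Object get(Object k) {
                    return ((IntWritable) k).get();
                }
            };
        }
        // Unknown key types fail loudly, as in the real adapter.
        return new ValExtractor() {
            @Override
            public Object get(Object k) {
                throw new UnsupportedOperationException(
                        "Unsupported recordkey data type "
                                + k.getClass().getName());
            }
        };
    }
}
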
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
index 826920f..d03cec8 100644
--- a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
+++ b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/StringPassResolverTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,48 +19,59 @@ package org.apache.hawq.pxf.plugins.hdfs;
  * under the License.
  */
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.LogFactory;
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.OutputFormat;
 import org.apache.hawq.pxf.service.BridgeInputBuilder;
 import org.apache.hawq.pxf.service.io.Text;
 import org.apache.hawq.pxf.service.utilities.ProtocolData;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.util.Arrays;
-import java.util.List;
-
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({Text.class, BridgeInputBuilder.class, ProtocolData.class, LogFactory.class})
+@PrepareForTest({
+        Text.class,
+        BridgeInputBuilder.class,
+        ProtocolData.class,
+        LogFactory.class })
 public class StringPassResolverTest {
     ProtocolData mockProtocolData;
-    Log mockLog;
-    
+
     @Test
     /*
      * Test the setFields method: small \n terminated input
-	 */
+     */
     public void testSetFields() throws Exception {
         StringPassResolver resolver = buildResolver();
 
-        byte[] data = new byte[]{(int) 'a', (int) 'b', (int) 'c', (int) 'd', (int) '\n',
-                (int) 'n', (int) 'o', (int) '\n'};
-
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(data));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        byte[] data = new byte[] {
+                (int) 'a',
+                (int) 'b',
+                (int) 'c',
+                (int) 'd',
+                (int) '\n',
+                (int) 'n',
+                (int) 'o',
+                (int) '\n' };
+
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(data));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
@@ -74,7 +85,7 @@ public class StringPassResolverTest {
     @Test
     /*
      * Test the setFields method: input > buffer size, \n terminated
-	 */
+     */
     public void testSetFieldsBigArray() throws Exception {
 
         StringPassResolver resolver = buildResolver();
@@ -85,8 +96,10 @@ public class StringPassResolverTest {
         }
         bigArray[1999] = (byte) '\n';
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(bigArray));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(bigArray));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
@@ -97,60 +110,56 @@ public class StringPassResolverTest {
     @Test
     /*
      * Test the setFields method: input > buffer size, no \n
-	 */
+     */
     public void testSetFieldsBigArrayNoNewLine() throws Exception {
 
-    	PowerMockito.mockStatic(LogFactory.class);
-        mockLog = mock(Log.class);
-        PowerMockito.when(LogFactory.getLog(any(Class.class))).thenReturn(mockLog);
-
-    	StringPassResolver resolver = buildResolver();
+        StringPassResolver resolver = buildResolver();
 
         byte[] bigArray = new byte[2000];
         for (int i = 0; i < 2000; ++i) {
             bigArray[i] = (byte) (i % 10 + 60);
         }
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(bigArray));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(bigArray));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
 
         verifyOneRow(oneRow, bigArray);
-
-        //verify(mockLog, atLeastOnce()).info(anyString());
-        //Mockito.verify(mockLog).warn("Stream ended without line breaksdfljsldkj");
-        //verifyWarning();
     }
 
     @Test
     /*
-	 * Test the setFields method: empty stream (returns -1)
-	 */
+     * Test the setFields method: empty stream (returns -1)
+     */
     public void testSetFieldsEmptyStream() throws Exception {
 
         StringPassResolver resolver = buildResolver();
 
         byte[] empty = new byte[0];
 
-        DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(empty));
-        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(mockProtocolData);
+        DataInputStream inputStream = new DataInputStream(
+                new ByteArrayInputStream(empty));
+        BridgeInputBuilder inputBuilder = new BridgeInputBuilder(
+                mockProtocolData);
         List<OneField> record = inputBuilder.makeInput(inputStream);
 
         OneRow oneRow = resolver.setFields(record);
 
         assertNull(oneRow);
     }
-	
-	/*
-	 * helpers functions
-	 */
-    private StringPassResolver buildResolver()
-            throws Exception {
- 
+
+    /*
+     * helper functions
+     */
+    private StringPassResolver buildResolver() throws Exception {
+
         mockProtocolData = mock(ProtocolData.class);
-        PowerMockito.when(mockProtocolData.outputFormat()).thenReturn(OutputFormat.TEXT);
+        PowerMockito.when(mockProtocolData.outputFormat()).thenReturn(
+                OutputFormat.TEXT);
 
         return new StringPassResolver(mockProtocolData);
     }
@@ -162,8 +171,4 @@ public class StringPassResolverTest {
         assertEquals(result.length, expected.length);
         assertTrue(Arrays.equals(result, expected));
     }
-
-//    private void verifyWarning() {
-//        Mockito.verify(Log).warn("Stream ended without line break");
-//    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
index b0e220c..304f14f 100644
--- a/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
+++ b/pxf/pxf-hdfs/src/test/java/org/apache/hawq/pxf/plugins/hdfs/utilities/RecordkeyAdapterTest.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hdfs.utilities;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -26,19 +26,20 @@ import org.apache.hadoop.io.*;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
 import org.powermock.modules.junit4.PowerMockRunner;
+import org.powermock.reflect.Whitebox;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 @RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapter")
 @PrepareForTest({RecordkeyAdapter.class, LogFactory.class})
 public class RecordkeyAdapterTest {
-    Log Log;
+    Log LOG;
     RecordkeyAdapter recordkeyAdapter;
 
     /**
@@ -159,16 +160,15 @@ public class RecordkeyAdapterTest {
     }
 
     private void mockLog() {
-        PowerMockito.mockStatic(LogFactory.class);
-        Log = mock(Log.class);
-        when(LogFactory.getLog(RecordkeyAdapter.class)).thenReturn(Log);
+        LOG = mock(Log.class);
+        Whitebox.setInternalState(RecordkeyAdapter.class, LOG);
     }
 
     private void verifyLog(String msg) {
-        Mockito.verify(Log).debug(msg);
+        Mockito.verify(LOG).debug(msg);
     }
 
     private void verifyLogOnlyOnce() {
-        Mockito.verify(Log, Mockito.times(1)).debug(Mockito.any());
+        Mockito.verify(LOG, Mockito.times(1)).debug(Mockito.any());
     }
 }

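With the logger now static final, stubbing LogFactory in a @Before method comes too late: the field was already assigned when RecordkeyAdapter's class initializer ran. Suppressing the static initializer and injecting the mock by field type, as the test above does, looks roughly like this (a sketch; it assumes the test sits outside the utilities package):

import static org.mockito.Mockito.mock;

import org.apache.commons.logging.Log;
import org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapter;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;

@RunWith(PowerMockRunner.class)
// The annotation needs the fully qualified name; a bare class name matches nothing.
@SuppressStaticInitializationFor("org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapter")
@PrepareForTest(RecordkeyAdapter.class)
public class StaticFinalLogInjectionSketch {

    @Test
    public void converterInitializationIsLogged() {
        // The suppressed initializer left LOG null; setInternalState finds
        // the static field by the mock's type and assigns it.
        Log log = mock(Log.class);
        Whitebox.setInternalState(RecordkeyAdapter.class, log);

        new RecordkeyAdapter().convertKeyValue(Integer.valueOf(7));

        Mockito.verify(log).debug(Mockito.contains("converter initialized"));
    }
}
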
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index c1b9c6b..ab40b3c 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 79260bd..af1a666 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import java.io.ByteArrayOutputStream;
 import java.util.List;
 import java.util.ListIterator;
@@ -55,8 +54,7 @@ import org.apache.hawq.pxf.plugins.hdfs.utilities.HdfsUtilities;
 import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 
 /**
- * Fragmenter class for HIVE tables.
- * <br>
+ * Fragmenter class for HIVE tables. <br>
  * Given a Hive table and its partitions divide the data into fragments (here a
  * data fragment is actually a HDFS file block) and return a list of them. Each
  * data fragment will contain the following information:
@@ -469,6 +467,7 @@ public class HiveDataFragmenter extends Fragmenter {
      */
     @Override
     public FragmentsStats getFragmentsStats() throws Exception {
-        throw new UnsupportedOperationException("ANALYZE for Hive plugin is not supported");
+        throw new UnsupportedOperationException(
+                "ANALYZE for Hive plugin is not supported");
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
index 103ae4d..59245d0 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.plugins.hive;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.*;
 import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.InputData;
@@ -144,7 +143,10 @@ public class HiveResolver extends Plugin implements ReadResolver {
                 : input.getUserProperty("MAPKEY_DELIM");
     }
 
-    /* Gets and init the deserializer for the records of this Hive data fragment. */
+    /*
+     * Gets and init the deserializer for the records of this Hive data
+     * fragment.
+     */
     void initSerde(InputData inputData) throws Exception {
         Properties serdeProperties;
 
@@ -588,10 +590,9 @@ public class HiveResolver extends Plugin implements ReadResolver {
 
         String userDelim = input.getUserProperty("DELIMITER");
 
-	if (userDelim == null) {
-            throw new IllegalArgumentException(
-                    "DELIMITER is a required option" ) ;
-	}
+        if (userDelim == null) {
+            throw new IllegalArgumentException("DELIMITER is a required option");
+        }
 
         final int VALID_LENGTH = 1;
         final int VALID_LENGTH_HEX = 4;
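
The constants above hint at how the DELIMITER option is validated: it is mandatory, and VALID_LENGTH/VALID_LENGTH_HEX suggest that a legal value is either one literal character or a four-character hex escape. The sketch below only illustrates that reading; the escape syntax (\xHH) is an assumption, not the actual PXF parsing code.

    // Illustrative only: accept a single literal character or an assumed
    // four-character \xHH hex escape for the user-supplied delimiter.
    class DelimiterParser {
        static char parse(String userDelim) {
            if (userDelim == null) {
                throw new IllegalArgumentException("DELIMITER is a required option");
            }
            if (userDelim.length() == 1) {
                return userDelim.charAt(0);               // literal character
            }
            if (userDelim.length() == 4 && userDelim.startsWith("\\x")) {
                // e.g. "\x09" -> tab; parse the two hex digits
                return (char) Integer.parseInt(userDelim.substring(2), 16);
            }
            throw new IllegalArgumentException("invalid DELIMITER: " + userDelim);
        }
    }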

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
index 1ebb66d..c59fbea 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.BadRecordException;
 import org.apache.hawq.pxf.api.OneField;
 import org.apache.hawq.pxf.api.OutputFormat;
@@ -281,13 +280,13 @@ public class BridgeOutputBuilder {
     /**
      * Breaks raw bytes into lines. Used only for sampling.
      *
-     * When sampling a data source, we have to make sure that
-     * we deal with actual rows (lines) and not bigger chunks of
-     * data such as used by LineBreakAccessor for performance.
-     * The input byte array is broken into lines, each one stored in
-     * the outputList. In case the read data doesn't end with a line delimiter,
-     * which can happen when reading chunks of bytes, the partial line is
-     * stored separately, and is being completed when reading the next chunk of data.
+     * When sampling a data source, we have to make sure that we deal with
+     * actual rows (lines) and not bigger chunks of data such as used by
+     * LineBreakAccessor for performance. The input byte array is broken into
+     * lines, each one stored in the outputList. In case the read data doesn't
+     * end with a line delimiter, which can happen when reading chunks of bytes,
+     * the partial line is stored separately, and is being completed when
+     * reading the next chunk of data.
      *
      * @param val input raw data to break into lines
      */
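
The reflowed javadoc above describes a genuine algorithmic point in the sampling path: data is read in large chunks for performance, so the builder must re-split chunks into rows and carry a trailing partial line into the next chunk. A minimal sketch of that carry-over idea follows; the class name and the single-byte-charset simplification are mine, not BridgeOutputBuilder's API.

    import java.util.ArrayList;
    import java.util.List;

    // Split incoming byte chunks into complete lines, keeping any trailing
    // partial line until the next chunk (or end of fragment) completes it.
    class LineChunker {
        private final StringBuilder partial = new StringBuilder();

        List<String> addChunk(byte[] chunk) {
            List<String> lines = new ArrayList<>();
            for (byte b : chunk) {
                partial.append((char) b);   // single-byte charset assumed here
                if (b == '\n') {            // a complete line ends at the delimiter
                    lines.add(partial.toString());
                    partial.setLength(0);
                }
            }
            return lines;                   // leftover bytes stay in 'partial'
        }

        String pendingPartialLine() {       // e.g. flushed at end of fragment
            return partial.length() > 0 ? partial.toString() : null;
        }
    }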

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
index 515ee61..d6efcae 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponse.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -35,14 +34,14 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.apache.hawq.pxf.api.Fragment;
 
 /**
- * Class for serializing fragments metadata in JSON format.
- * The class implements {@link StreamingOutput} so the serialization will be
- * done in a stream and not in one bulk, this in order to avoid running
- * out of memory when processing a lot of fragments.
+ * Class for serializing fragments metadata in JSON format. The class implements
+ * {@link StreamingOutput} so the serialization will be done in a stream and not
+ * in one bulk, this in order to avoid running out of memory when processing a
+ * lot of fragments.
  */
 public class FragmentsResponse implements StreamingOutput {
 
-    private static Log Log = LogFactory.getLog(FragmentsResponse.class);
+    private static final Log Log = LogFactory.getLog(FragmentsResponse.class);
 
     private List<Fragment> fragments;
 
@@ -56,10 +55,17 @@ public class FragmentsResponse implements StreamingOutput {
     }
 
     /**
-     * Serializes a fragments list in JSON,
-     * To be used as the result string for HAWQ.
-     * An example result is as follows:
-     * &lt;code&gt;{"PXFFragments":[{"replicas":["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],"sourceName":"text2.csv", "index":"0", "metadata":"&lt;base64 metadata for fragment&gt;", "userData":"&lt;data_specific_to_third_party_fragmenter&gt;"},{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"],"sourceName":"text_data.csv","index":"0","metadata":"&lt;base64 metadata for fragment&gt;","userData":"&lt;data_specific_to_third_party_fragmenter&gt;"}]}&lt;/code&gt;
+     * Serializes a fragments list in JSON, to be used as the result string for
+     * HAWQ. An example result is as follows:
+     * <code>{"PXFFragments":[{"replicas":
+     * ["sdw1.corp.emc.com","sdw3.corp.emc.com","sdw8.corp.emc.com"],
+     * "sourceName":"text2.csv", "index":"0","metadata":"&lt;base64 metadata for fragment&gt;",
+     * "userData":"&lt;data_specific_to_third_party_fragmenter&gt;"
+     * },{"replicas":["sdw2.corp.emc.com","sdw4.corp.emc.com","sdw5.corp.emc.com"
+     * ],"sourceName":"text_data.csv","index":"0","metadata":
+     * "&lt;base64 metadata for fragment&gt;"
+     * ,"userData":"&lt;data_specific_to_third_party_fragmenter&gt;"
+     * }]}</code>
      */
     @Override
     public void write(OutputStream output) throws IOException,
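
The class javadoc states the design choice plainly: implementing JAX-RS StreamingOutput means the fragment list is serialized piece by piece into the response stream instead of being materialized as one large string. A minimal sketch of that idea, using an illustrative payload rather than the PXF wire format:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.List;

    import javax.ws.rs.WebApplicationException;
    import javax.ws.rs.core.StreamingOutput;

    // Write one element at a time so the full response never has to fit in
    // memory; the JSON framing here is illustrative.
    class ItemsResponse implements StreamingOutput {
        private final List<String> items;

        ItemsResponse(List<String> items) {
            this.items = items;
        }

        @Override
        public void write(OutputStream output) throws IOException,
                WebApplicationException {
            output.write("{\"items\":[".getBytes(StandardCharsets.UTF_8));
            for (int i = 0; i < items.size(); i++) {
                if (i > 0) {
                    output.write(',');
                }
                output.write(('"' + items.get(i) + '"').getBytes(StandardCharsets.UTF_8));
            }
            output.write("]}".getBytes(StandardCharsets.UTF_8));
            output.flush();  // each piece can leave as soon as it is written
        }
    }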

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
index 1148078..14e87f9 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/FragmentsResponseFormatter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.Fragment;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -30,23 +29,25 @@ import java.util.HashMap;
 import java.util.List;
 
 /**
- * Utility class for converting Fragments into a {@link FragmentsResponse}
- * that will serialize them into JSON format.
+ * Utility class for converting Fragments into a {@link FragmentsResponse} that
+ * will serialize them into JSON format.
  */
 public class FragmentsResponseFormatter {
 
-    private static Log LOG = LogFactory.getLog(FragmentsResponseFormatter.class);
+    private static final Log LOG = LogFactory.getLog(FragmentsResponseFormatter.class);
 
     /**
-     * Converts Fragments list to FragmentsResponse
-     * after replacing host name by their respective IPs.
+     * Converts Fragments list to FragmentsResponse after replacing host name by
+     * their respective IPs.
      *
      * @param fragments list of fragments
      * @param data data (e.g. path) related to the fragments
      * @return FragmentsResponse with given fragments
      * @throws UnknownHostException if converting host names to IP fails
      */
-    public static FragmentsResponse formatResponse(List<Fragment> fragments, String data) throws UnknownHostException   {
+    public static FragmentsResponse formatResponse(List<Fragment> fragments,
+                                                   String data)
+            throws UnknownHostException {
         /* print the raw fragment list to log when in debug level */
         if (LOG.isDebugEnabled()) {
             LOG.debug("Fragments before conversion to IP list:");
@@ -58,7 +59,7 @@ public class FragmentsResponseFormatter {
 
         updateFragmentIndex(fragments);
 
-	/* print the fragment list to log when in debug level */
+        /* print the fragment list to log when in debug level */
         if (LOG.isDebugEnabled()) {
             FragmentsResponseFormatter.printList(fragments, data);
         }
@@ -92,7 +93,8 @@ public class FragmentsResponseFormatter {
      *
      * @throws UnknownHostException if converting host name to IP fails
      */
-    private static void convertHostsToIPs(List<Fragment> fragments) throws UnknownHostException {
+    private static void convertHostsToIPs(List<Fragment> fragments)
+            throws UnknownHostException {
         /* host converted to IP map. Used to limit network calls. */
         HashMap<String, String> hostToIpMap = new HashMap<String, String>();
 
@@ -123,29 +125,30 @@ public class FragmentsResponseFormatter {
 
     /*
      * Converts a fragments list to a readable string and prints it to the log.
-     * Intended for debugging purposes only.
-     * 'datapath' is the data path part of the original URI (e.g., table name, *.csv, etc).
-	 */
+     * Intended for debugging purposes only. 'datapath' is the data path part of
+     * the original URI (e.g., table name, *.csv, etc).
+     */
     private static void printList(List<Fragment> fragments, String datapath) {
-        LOG.debug("List of " +
-                (fragments.isEmpty() ? "no" : fragments.size()) + "fragments for \"" +
-                 datapath + "\"");
+        LOG.debug("List of " + (fragments.isEmpty() ? "no" : fragments.size())
+                + "fragments for \"" + datapath + "\"");
 
         int i = 0;
         for (Fragment fragment : fragments) {
             StringBuilder result = new StringBuilder();
-            result.append("Fragment #").append(++i).append(": [")
-                .append("Source: ").append(fragment.getSourceName())
-                .append(", Index: ").append(fragment.getIndex())
-                .append(", Replicas:");
+            result.append("Fragment #").append(++i).append(": [").append(
+                    "Source: ").append(fragment.getSourceName()).append(
+                    ", Index: ").append(fragment.getIndex()).append(
+                    ", Replicas:");
             for (String host : fragment.getReplicas()) {
                 result.append(" ").append(host);
             }
 
-            result.append(", Metadata: ").append(new String(fragment.getMetadata()));
+            result.append(", Metadata: ").append(
+                    new String(fragment.getMetadata()));
 
             if (fragment.getUserData() != null) {
-                result.append(", User Data: ").append(new String(fragment.getUserData()));
+                result.append(", User Data: ").append(
+                        new String(fragment.getUserData()));
             }
             result.append("] ");
             LOG.debug(result);
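
One detail worth keeping in mind in convertHostsToIPs above: the host-to-IP map exists purely to limit network calls, since many fragments list the same replica hosts. A small sketch of that memoized lookup with illustrative types (the real code walks Fragment objects):

    import java.net.InetAddress;
    import java.net.UnknownHostException;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Resolve each distinct host name once and reuse the cached result.
    class HostResolver {
        static void toIPs(List<String[]> replicaSets) throws UnknownHostException {
            Map<String, String> hostToIp = new HashMap<String, String>();
            for (String[] replicas : replicaSets) {
                for (int i = 0; i < replicas.length; i++) {
                    String ip = hostToIp.get(replicas[i]);
                    if (ip == null) {
                        ip = InetAddress.getByName(replicas[i]).getHostAddress();
                        hostToIp.put(replicas[i], ip);  // cache for later fragments
                    }
                    replicas[i] = ip;
                }
            }
        }
    }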

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 4f710ed..eb83627 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -34,7 +34,7 @@ import org.apache.hawq.pxf.api.Metadata;
  */
 public class MetadataResponseFormatter {
 
-    private static Log Log = LogFactory.getLog(MetadataResponseFormatter.class);
+    private static final Log LOG = LogFactory.getLog(MetadataResponseFormatter.class);
 
     /**
      * Converts {@link Metadata} to JSON String format.
@@ -45,7 +45,7 @@ public class MetadataResponseFormatter {
      */
     public static String formatResponseString(Metadata metadata) throws IOException {
         /* print the metadata before serialization */
-        Log.debug(MetadataResponseFormatter.metadataToString(metadata));
+        LOG.debug(MetadataResponseFormatter.metadataToString(metadata));
 
         return MetadataResponseFormatter.metadataToJSON(metadata);
     }
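
The formatters in this commit wrap costly debug strings in LOG.isDebugEnabled() where the message takes real work to build; metadataToString presumably falls in the same category. A minimal sketch of the idiom, with illustrative names:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Guard message construction so the toString() work is skipped entirely
    // when debug logging is disabled.
    class Reporter {
        private static final Log LOG = LogFactory.getLog(Reporter.class);

        void report(Object metadata) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("metadata: " + metadata);  // built only at debug level
            }
        }
    }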

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
index ed764d6..0f3c968 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadBridge.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import org.apache.hawq.pxf.api.BadRecordException;
 import org.apache.hawq.pxf.api.OneRow;
 import org.apache.hawq.pxf.api.ReadAccessor;
@@ -51,7 +50,7 @@ public class ReadBridge implements Bridge {
     BridgeOutputBuilder outputBuilder = null;
     LinkedList<Writable> outputQueue = null;
 
-    private static final Log Log = LogFactory.getLog(ReadBridge.class);
+    private static final Log LOG = LogFactory.getLog(ReadBridge.class);
 
     /**
      * C'tor - set the implementation of the bridge.
@@ -94,7 +93,7 @@ public class ReadBridge implements Bridge {
                     fileAccessor.closeForRead();
                     output = outputBuilder.getPartialLine();
                     if (output != null) {
-                        Log.warn("A partial record in the end of the fragment");
+                        LOG.warn("A partial record in the end of the fragment");
                     }
                     // if there is a partial line, return it now, otherwise it
                     // will return null
@@ -121,10 +120,10 @@ public class ReadBridge implements Bridge {
                 row_info = onerow.toString();
             }
             if (ex.getCause() != null) {
-                Log.debug("BadRecordException " + ex.getCause().toString()
+                LOG.debug("BadRecordException " + ex.getCause().toString()
                         + ": " + row_info);
             } else {
-                Log.debug(ex.toString() + ": " + row_info);
+                LOG.debug(ex.toString() + ": " + row_info);
             }
             output = outputBuilder.getErrorOutput(ex);
         } catch (Exception ex) {
@@ -174,7 +173,7 @@ public class ReadBridge implements Bridge {
     public boolean isThreadSafe() {
         boolean result = ((Plugin) fileAccessor).isThreadSafe()
                 && ((Plugin) fieldsResolver).isThreadSafe();
-        Log.debug("Bridge is " + (result ? "" : "not ") + "thread safe");
+        LOG.debug("Bridge is " + (result ? "" : "not ") + "thread safe");
         return result;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
index e7bfa4e..d5ae66a 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ReadSamplingBridge.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,7 +19,6 @@ package org.apache.hawq.pxf.service;
  * under the License.
  */
 
-
 import java.io.DataInputStream;
 import java.util.BitSet;
 
@@ -52,7 +51,7 @@ public class ReadSamplingBridge implements Bridge {
     int sampleSize;
     int curIndex;
 
-    static private Log Log = LogFactory.getLog(ReadSamplingBridge.class);;
+    private static final Log LOG = LogFactory.getLog(ReadSamplingBridge.class);
 
     /**
      * C'tor - set the implementation of the bridge.
@@ -86,7 +85,7 @@ public class ReadSamplingBridge implements Bridge {
             bitSetSize /= 10;
             sampleSize /= 10;
         }
-        Log.debug("bit set size = " + bitSetSize + " sample size = "
+        LOG.debug("bit set size = " + bitSetSize + " sample size = "
                 + sampleSize);
     }
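
The hunk above appears to scale bitSetSize and sampleSize down together until the numbers are manageable, which implies the sampling is driven by a BitSet of candidate row slots. The sketch below shows one plausible, uniform-random way to fill such a BitSet; the real bridge delegates selection to its own sampling utility, so treat this purely as an illustration.

    import java.util.BitSet;
    import java.util.Random;

    // Mark 'sampleSize' of 'bitSetSize' row slots; rows whose index is set
    // are emitted, the rest are skipped.
    class RowSampler {
        static BitSet pick(int bitSetSize, int sampleSize) {
            int target = Math.min(sampleSize, bitSetSize);  // avoid an endless loop
            BitSet chosen = new BitSet(bitSetSize);
            Random rnd = new Random();
            while (chosen.cardinality() < target) {
                chosen.set(rnd.nextInt(bitSetSize));        // duplicates are harmless
            }
            return chosen;
        }
    }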
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/127cac3e/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
index da541b2..5bc26f1 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
@@ -8,9 +8,9 @@ package org.apache.hawq.pxf.service.io;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -46,7 +46,7 @@ public class GPDBWritable implements Writable {
      * For var length type, col val is <4 byte length><payload val>
 	 */
 
-    private static Log Log = LogFactory.getLog(GPDBWritable.class);
+    private static final Log LOG = LogFactory.getLog(GPDBWritable.class);
     private static final int EOF = -1;
 
     /*
@@ -156,11 +156,11 @@ public class GPDBWritable implements Writable {
         try {
             pktlen = in.readInt();
         } catch (EOFException e) {
-            Log.debug("Reached end of stream (EOFException)");
+            LOG.debug("Reached end of stream (EOFException)");
             return EOF;
         }
         if (pktlen == EOF) {
-            Log.debug("Reached end of stream (returned -1)");
+            LOG.debug("Reached end of stream (returned -1)");
         }
 
         return pktlen;