Posted to commits@hawq.apache.org by sh...@apache.org on 2015/11/03 01:36:05 UTC

[02/15] incubator-hawq git commit: HAWQ-45. PXF package namespace refactor

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/AnalyzerResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/AnalyzerResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/AnalyzerResource.java
new file mode 100644
index 0000000..9d678fa
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/AnalyzerResource.java
@@ -0,0 +1,102 @@
+package org.apache.hawq.pxf.service.rest;
+
+import org.apache.hawq.pxf.api.Analyzer;
+import org.apache.hawq.pxf.api.AnalyzerStats;
+import org.apache.hawq.pxf.service.AnalyzerFactory;
+import org.apache.hawq.pxf.service.utilities.ProtocolData;
+import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.util.Map;
+
+/*
+ * Class enhances the API of the WEBHDFS REST server.
+ * Returns estimated statistics for a data resource, used to support ANALYZE on PXF tables.
+ * Example of querying the ANALYZER API from a web client:
+ * curl -i "http://localhost:50070/pxf/v2/Analyzer/getEstimatedStats?path=/dir1/dir2/*txt"
+ * /pxf/ is made part of the path when there is a webapp by that name in tcServer.
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/Analyzer/")
+public class AnalyzerResource extends RestResource {
+    private Log Log;
+
+
+    public AnalyzerResource() throws IOException {
+        Log = LogFactory.getLog(AnalyzerResource.class);
+    }
+
+    /*
+     * Returns estimated statistics for the given path (data source).
+     * Example of querying the ANALYZER API from a web client:
+     * curl -i "http://localhost:50070/pxf/v2/Analyzer/getEstimatedStats?path=/dir1/dir2/*txt"
+     * A default answer, unless an analyzer implements getEstimatedStats, would be:
+     * {"PXFDataSourceStats":[{"blockSize":67108864,"numberOfBlocks":1000,"numberOfTuples":1000000}]}
+     * Currently only an HDFS analyzer is implemented; it calculates the block size and
+     * number of blocks, and returns -1 for the number of tuples.
+     * Example:
+     * {"PXFDataSourceStats":[{"blockSize":67108864,"numberOfBlocks":3,"numberOfTuples":-1}]}
+     *
+     * @param servletContext Servlet context contains attributes required by SecuredHDFS
+     * @param headers Holds HTTP headers from request
+     * @param path Holds URI path option used in this request
+     */
+    @GET
+    @Path("getEstimatedStats")
+    @Produces("application/json")
+    public Response getEstimatedStats(@Context ServletContext servletContext,
+                                      @Context final HttpHeaders headers,
+                                      @QueryParam("path") String path) throws Exception {
+
+        if (Log.isDebugEnabled()) {
+            StringBuilder startmsg = new StringBuilder("ANALYZER/getEstimatedStats started for path \"" + path + "\"");
+            if (headers != null) {
+                for (String header : headers.getRequestHeaders().keySet()) {
+                    startmsg.append(" Header: ").append(header).append(" Value: ").append(headers.getRequestHeader(header));
+                }
+            }
+            Log.debug(startmsg);
+        }
+
+        /* Convert headers into a regular map */
+        Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
+
+        /* Store protocol level properties and verify */
+        final ProtocolData protData = new ProtocolData(params);
+        SecuredHDFS.verifyToken(protData, servletContext);
+        
+        /*
+         * Analyzer is a special case in which it is hard to tell whether the user
+         * didn't specify one, specified a profile that doesn't include one, or it's
+         * an actual protocol violation. Since we can only test protocol level
+         * logic, we assume (as before) that it's a user error, which is most
+         * likely the case. When the analyzer module is removed in the near
+         * future, this assumption will go away with it.
+         */
+        if (protData.getAnalyzer() == null) {
+            throw new IllegalArgumentException(
+                    "PXF 'Analyzer' class was not found. Please supply it in the LOCATION clause or use it in a PXF profile in order to run ANALYZE on this table");
+        }
+        
+        /* Create an analyzer instance with API level parameters */
+        final Analyzer analyzer = AnalyzerFactory.create(protData);
+
+        /*
+         * Query the PXF Analyzer for the estimated statistics of the resource.
+         * The statistics are returned as a JSON-formatted string.
+         */
+        String jsonOutput = AnalyzerStats.dataToJSON(analyzer.getEstimatedStats(path));
+
+        return Response.ok(jsonOutput, MediaType.APPLICATION_JSON_TYPE).build();
+    }
+}
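
A minimal client-side sketch of calling this endpoint, assuming a JAX-RS 2.0 client implementation (e.g. Jersey) is on the classpath; the class name, host and port below are illustrative and not part of this commit, and the X-GP-* protocol headers that ProtocolData expects are elided:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;

    public class AnalyzerClientSketch {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            try {
                String stats = client
                        .target("http://localhost:51200/pxf/v13/Analyzer/getEstimatedStats")
                        .queryParam("path", "/dir1/dir2/*txt")
                        .request(MediaType.APPLICATION_JSON)
                        // A real request must also carry the X-GP-* headers parsed by ProtocolData.
                        .get(String.class);
                // Expected shape: {"PXFDataSourceStats":[{"blockSize":...,"numberOfBlocks":...,"numberOfTuples":...}]}
                System.out.println(stats);
            } finally {
                client.close();
            }
        }
    }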

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
new file mode 100644
index 0000000..588845d
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/BridgeResource.java
@@ -0,0 +1,151 @@
+package org.apache.hawq.pxf.service.rest;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+
+import org.apache.catalina.connector.ClientAbortException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hawq.pxf.service.Bridge;
+import org.apache.hawq.pxf.service.ReadBridge;
+import org.apache.hawq.pxf.service.io.Writable;
+import org.apache.hawq.pxf.service.utilities.ProtocolData;
+import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
+
+/*
+ * This class handles the subpath /<version>/Bridge/ of this
+ * REST component
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/Bridge/")
+public class BridgeResource extends RestResource {
+
+    private static Log Log = LogFactory.getLog(BridgeResource.class);
+    /**
+     * Lock is needed here in the case of a non-thread-safe plugin.
+     * Using synchronized methods is not enough because the bridge work
+     * is called by jetty ({@link StreamingOutput}) after we have already
+     * returned from this class's context.
+     * <p/>
+     * BRIDGE_LOCK is accessed through lock() and unlock() functions, based on the
+     * isThreadSafe parameter that is determined by the bridge.
+     */
+    private static final ReentrantLock BRIDGE_LOCK = new ReentrantLock();
+
+    public BridgeResource() {
+    }
+
+    /*
+     * Used to be HDFSReader. Creates a bridge instance and iterates over
+     * its records, writing them to the outgoing stream.
+     * Outputs GPDBWritable.
+     *
+     * Parameters come through HTTP header.
+     *
+     * @param servletContext Servlet context contains attributes required by SecuredHDFS
+     * @param headers Holds HTTP headers from request
+     */
+    @GET
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response read(@Context final ServletContext servletContext,
+                         @Context HttpHeaders headers) throws Exception {
+        // Convert headers into a regular map
+        Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
+
+        Log.debug("started with parameters: " + params);
+
+        ProtocolData protData = new ProtocolData(params);
+        SecuredHDFS.verifyToken(protData, servletContext);
+        Bridge bridge = new ReadBridge(protData);
+        String dataDir = protData.getDataSource();
+        // THREAD-SAFE parameter has precedence
+        boolean isThreadSafe = protData.isThreadSafe() && bridge.isThreadSafe();
+        Log.debug("Request for " + dataDir + " will be handled " +
+                (isThreadSafe ? "without" : "with") + " synchronization");
+
+        return readResponse(bridge, protData, isThreadSafe);
+    }
+
+    Response readResponse(final Bridge bridge, ProtocolData protData, final boolean threadSafe) throws Exception {
+        final int fragment = protData.getDataFragment();
+        final String dataDir = protData.getDataSource();
+
+        // Creating an internal streaming class
+        // which will iterate the records and put them on the
+        // output stream
+        final StreamingOutput streaming = new StreamingOutput() {
+            @Override
+            public void write(final OutputStream out) throws IOException, WebApplicationException {
+                long recordCount = 0;
+
+                if (!threadSafe) {
+                    lock(dataDir);
+                }
+                try {
+
+                    if (!bridge.beginIteration()) {
+                        return;
+                    }
+
+                    Writable record;
+                    DataOutputStream dos = new DataOutputStream(out);
+                    Log.debug("Starting streaming fragment " + fragment + " of resource " + dataDir);
+                    while ((record = bridge.getNext()) != null) {
+                        record.write(dos);
+                        ++recordCount;
+                    }
+                    Log.debug("Finished streaming fragment " + fragment + " of resource " + dataDir + ", " + recordCount + " records.");
+                } catch (ClientAbortException e) {
+                    // Occurs whenever the client (HAWQ) decides to end the connection
+                    Log.error("Remote connection closed by HAWQ", e);
+                } catch (Exception e) {
+                    Log.error("Exception thrown when streaming", e);
+                    throw new IOException(e.getMessage());
+                } finally {
+                    Log.debug("Stopped streaming fragment " + fragment + " of resource " + dataDir + ", " + recordCount + " records.");
+                    if (!threadSafe) {
+                        unlock(dataDir);
+                    }
+                }
+            }
+        };
+
+        return Response.ok(streaming, MediaType.APPLICATION_OCTET_STREAM).build();
+    }
+
+    /**
+     * Lock BRIDGE_LOCK
+     *
+     * @param path path for the request, used for logging.
+     */
+    private void lock(String path) {
+        Log.trace("Locking BridgeResource for " + path);
+        BRIDGE_LOCK.lock();
+        Log.trace("Locked BridgeResource for " + path);
+    }
+
+    /**
+     * Unlock BRIDGE_LOCK
+     *
+     * @param path path for the request, used for logging.
+     */
+    private void unlock(String path) {
+        Log.trace("Unlocking BridgeResource for " + path);
+        BRIDGE_LOCK.unlock();
+        Log.trace("Unlocked BridgeResource for " + path);
+    }
+}
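
The conditional locking in readResponse() above can be distilled into a small standalone sketch. The class and method names here are made up for illustration, but the pattern (take the lock only when the plugin is not thread safe, and always release it in a finally block) is the one BridgeResource uses; Java 8 lambdas are used for brevity.

    import java.util.concurrent.locks.ReentrantLock;

    public class ConditionalLockSketch {
        private static final ReentrantLock LOCK = new ReentrantLock();

        // Serialize access only for non-thread-safe plugins, mirroring BridgeResource.
        static void stream(boolean threadSafe, Runnable work) {
            if (!threadSafe) {
                LOCK.lock();
            }
            try {
                work.run();
            } finally {
                if (!threadSafe) {
                    LOCK.unlock();
                }
            }
        }

        public static void main(String[] args) {
            stream(false, () -> System.out.println("fragment streamed under the lock"));
            stream(true, () -> System.out.println("fragment streamed concurrently"));
        }
    }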

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
new file mode 100644
index 0000000..6421ef0
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ClusterNodesResource.java
@@ -0,0 +1,117 @@
+package org.apache.hawq.pxf.service.rest;
+
+import org.apache.catalina.connector.ClientAbortException;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+
+/*
+ * Class enhances the API of the HDFS REST server (namenode).
+ * Example of querying the getNodesInfo API from a web client:
+ * curl "http://localhost:50070/pxf/v2/HadoopCluster/getNodesInfo"
+ * /pxf/ is made part of the path when there is a webapp by that name in tcServer.
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/HadoopCluster/")
+public class ClusterNodesResource {
+    private Log Log;
+
+    public ClusterNodesResource() throws IOException {
+        Log = LogFactory.getLog(ClusterNodesResource.class);
+    }
+
+
+    /*
+     * Function queries the Hadoop namenode with the getDataNodeStats API.
+     * It gets the host IP and REST port of every HDFS data node in the
+     * cluster. Then, it packs the results in JSON format and writes them to
+     * the HTTP response stream.
+     * Response Examples:
+     * a. When there are no datanodes - getDataNodeStats returns an empty array
+     *    {"regions":[]}
+     * b. When there are datanodes
+     *    {"regions":[{"host":"1.2.3.1","port":50075},{"host":"1.2.3.2","port":50075}]}
+     */
+    @GET
+    @Path("getNodesInfo")
+    @Produces("application/json")
+    public Response read() throws Exception {
+        Log.debug("getNodesInfo started");
+        StringBuilder jsonOutput = new StringBuilder("{\"regions\":[");
+        try {
+            /* 1. Initialize the HADOOP client side API for a distributed file system */
+            Configuration conf = new Configuration();
+            FileSystem fs = FileSystem.get(conf);
+            DistributedFileSystem dfs = (DistributedFileSystem) fs;
+
+            /* 2. Query the namenode for the datanodes info.
+             *    Only live nodes are returned - in accordance with the results returned by
+             *    org.apache.hadoop.hdfs.tools.DFSAdmin#report().
+             */
+            DatanodeInfo[] liveNodes = dfs.getDataNodeStats(DatanodeReportType.LIVE);
+
+            /* 3. Pack the datanodes info in JSON text format and write it
+             *    to the HTTP output stream.
+             */
+            String prefix = "";
+            for (DatanodeInfo node : liveNodes) {
+                verifyNode(node);
+                jsonOutput.append(prefix).append(writeNode(node)); // write one node to the HTTP stream
+                prefix = ",";
+            }
+            jsonOutput.append("]}");
+            Log.debug("getNodesCluster output: " + jsonOutput);
+        } catch (NodeDataException e) {
+            Log.error("Nodes verification failed", e);
+            throw e;
+        } catch (ClientAbortException e) {
+            Log.error("Remote connection closed by HAWQ", e);
+            throw e;
+        } catch (java.io.IOException e) {
+            Log.error("Unhandled exception thrown", e);
+            throw e;
+        }
+
+        return Response.ok(jsonOutput.toString(), MediaType.APPLICATION_JSON_TYPE).build();
+    }
+
+    private class NodeDataException extends java.io.IOException {
+
+        /**
+         *
+         */
+        private static final long serialVersionUID = 1L;
+
+        public NodeDataException(String paramString) {
+            super(paramString);
+        }
+    }
+
+    private void verifyNode(DatanodeInfo node) throws NodeDataException {
+        int port = node.getInfoPort();
+        String ip = node.getIpAddr();
+
+        if (StringUtils.isEmpty(ip)) {
+            throw new NodeDataException("Invalid IP: " + ip + " (Node " + node + ")");
+        }
+
+        if (port <= 0) {
+            throw new NodeDataException("Invalid port: " + port + " (Node " + node + ")");
+        }
+    }
+
+    String writeNode(DatanodeInfo node) throws java.io.IOException {
+        return "{\"host\":\"" + node.getIpAddr() + "\",\"port\":" + node.getInfoPort() + "}";
+    }
+}
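
The {"regions":[...]} document produced above is plain JSON, so a caller can consume it with any JSON library. A small sketch, assuming Jackson databind is available on the classpath (the class name is hypothetical):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class NodesInfoParseSketch {
        public static void main(String[] args) throws Exception {
            // Sample response documented in the getNodesInfo comment above.
            String json = "{\"regions\":[{\"host\":\"1.2.3.1\",\"port\":50075},{\"host\":\"1.2.3.2\",\"port\":50075}]}";
            JsonNode root = new ObjectMapper().readTree(json);
            for (JsonNode region : root.get("regions")) {
                System.out.println(region.get("host").asText() + ":" + region.get("port").asInt());
            }
        }
    }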

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
new file mode 100644
index 0000000..258f8c2
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/FragmenterResource.java
@@ -0,0 +1,84 @@
+package org.apache.hawq.pxf.service.rest;
+
+import org.apache.hawq.pxf.api.Fragment;
+import org.apache.hawq.pxf.api.Fragmenter;
+import org.apache.hawq.pxf.service.FragmenterFactory;
+import org.apache.hawq.pxf.service.FragmentsResponse;
+import org.apache.hawq.pxf.service.FragmentsResponseFormatter;
+import org.apache.hawq.pxf.service.utilities.ProtocolData;
+import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/*
+ * Class enhances the API of the WEBHDFS REST server.
+ * Returns the data fragments that a data resource is made of, enabling parallel processing of the data resource.
+ * Example of querying the FRAGMENTER API from a web client:
+ * curl -i "http://localhost:50070/pxf/v2/Fragmenter/getFragments?path=/dir1/dir2/*txt"
+ * /pxf/ is made part of the path when there is a webapp by that name in tcServer.
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/Fragmenter/")
+public class FragmenterResource extends RestResource {
+    private Log Log;
+
+    public FragmenterResource() throws IOException {
+        Log = LogFactory.getLog(FragmenterResource.class);
+    }
+
+    /*
+     * The function is called when http://nn:port/pxf/vx/Fragmenter/getFragments?path=...
+     * is used
+     *
+     * @param servletContext Servlet context contains attributes required by SecuredHDFS
+     * @param headers Holds HTTP headers from request
+     * @param path Holds URI path option used in this request
+     */
+    @GET
+    @Path("getFragments")
+    @Produces("application/json")
+    public Response getFragments(@Context final ServletContext servletContext,
+            @Context final HttpHeaders headers,
+            @QueryParam("path") final String path) throws Exception {
+
+        if (Log.isDebugEnabled()) {
+            StringBuilder startMsg = new StringBuilder("FRAGMENTER started for path \"" + path + "\"");
+            for (String header : headers.getRequestHeaders().keySet()) {
+                startMsg.append(" Header: ").append(header).append(" Value: ").append(headers.getRequestHeader(header));
+            }
+            Log.debug(startMsg);
+        }
+
+        /* Convert headers into a case-insensitive regular map */
+        Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
+
+        /* Store protocol level properties and verify */
+        ProtocolData protData = new ProtocolData(params);
+        if (protData.getFragmenter() == null) {
+            protData.protocolViolation("fragmenter");
+        }
+        SecuredHDFS.verifyToken(protData, servletContext);
+
+        /* Create a fragmenter instance with API level parameters */
+        final Fragmenter fragmenter = FragmenterFactory.create(protData);
+
+        List<Fragment> fragments = fragmenter.getFragments();
+        FragmentsResponse fragmentsResponse = FragmentsResponseFormatter.formatResponse(fragments, path);
+
+        return Response.ok(fragmentsResponse, MediaType.APPLICATION_JSON_TYPE).build();
+    }
+}
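
A sketch of invoking getFragments from a JAX-RS 2.0 client. The X-GP-* headers shown are an illustrative subset of what ProtocolData (later in this commit) parses; the exact set and values depend on the table definition and profile, and the class name, host and port are hypothetical:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;

    public class FragmenterClientSketch {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            try {
                String fragments = client
                        .target("http://localhost:51200/pxf/v13/Fragmenter/getFragments")
                        .queryParam("path", "/dir1/dir2/*txt")
                        .request(MediaType.APPLICATION_JSON)
                        .header("X-GP-SEGMENT-ID", "0")
                        .header("X-GP-SEGMENT-COUNT", "1")
                        .header("X-GP-HAS-FILTER", "0")
                        .header("X-GP-FORMAT", "TEXT")
                        .header("X-GP-URL-HOST", "localhost")
                        .header("X-GP-URL-PORT", "51200")
                        .header("X-GP-DATA-DIR", "/dir1/dir2")
                        // ...plus ALIGNMENT, ATTRS, ACCESSOR/RESOLVER or PROFILE, FRAGMENTER, etc.
                        .get(String.class);
                System.out.println(fragments);
            } finally {
                client.close();
            }
        }
    }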

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
new file mode 100644
index 0000000..2316bc7
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/InvalidPathResource.java
@@ -0,0 +1,129 @@
+package org.apache.hawq.pxf.service.rest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.UriInfo;
+import java.io.IOException;
+
+
+class Version {
+    final static String PXF_PROTOCOL_VERSION = "v13";
+}
+
+/**
+ * Class for catching paths that are not defined by other resources.
+ * NOTE: This resource must be accessible without any security checks
+ * as it is used to verify proper load of the PXF webapp.
+ *
+ * For each path, the version is compared to the current version PXF_PROTOCOL_VERSION.
+ * The expected format of a path is "{@code http://<host>:<port>/pxf/<version>/<rest of path>}"
+ *
+ * The returned value is always a Server Error code (500).
+ * If the version is different from the current version, an appropriate error is returned with version details.
+ * Otherwise, an error about an unknown path is returned.
+ */
+@Path("/")
+public class InvalidPathResource {
+    @Context
+    UriInfo rootUri;
+
+    private Log Log;
+
+    public InvalidPathResource() throws IOException {
+        super();
+        Log = LogFactory.getLog(InvalidPathResource.class);
+    }
+
+    /*
+     * Catch path /pxf/
+     */
+    @GET
+    @Path("/")
+    public Response noPathGet() throws Exception {
+        return noPath();
+    }
+
+    @POST
+    @Path("/")
+    public Response noPathPost() throws Exception {
+        return noPath();
+    }
+
+    private Response noPath() throws Exception {
+        String errmsg = "Unknown path " + rootUri.getAbsolutePath();
+        return sendErrorMessage(errmsg);
+    }
+
+    /*
+     * Catch paths of pattern /pxf/*
+     */
+    @GET
+    @Path("/{path:.*}")
+    public Response wrongPathGet(@PathParam("path") String path) throws Exception {
+        return wrongPath(path);
+    }
+
+    /*
+     * Catch paths of pattern /pxf/*
+     */
+    @POST
+    @Path("/{path:.*}")
+    public Response wrongPathPost(@PathParam("path") String path) throws Exception {
+        return wrongPath(path);
+    }
+
+
+    private Response wrongPath(String path) throws Exception {
+
+        String errmsg;
+        String version = parseVersion(path);
+
+        Log.debug("REST request: " + rootUri.getAbsolutePath() + ". " +
+                "Version " + version + ", supported version is " + Version.PXF_PROTOCOL_VERSION);
+
+        if (version.equals(Version.PXF_PROTOCOL_VERSION)) {
+            errmsg = "Unknown path " + rootUri.getAbsolutePath();
+        } else {
+            errmsg = "Wrong version " + version + ", supported version is " + Version.PXF_PROTOCOL_VERSION;
+        }
+
+        return sendErrorMessage(errmsg);
+    }
+
+    /*
+     * Return error message
+     */
+    private Response sendErrorMessage(String message) {
+        ResponseBuilder b = Response.serverError();
+        b.entity(message);
+        b.type(MediaType.TEXT_PLAIN_TYPE);
+        return b.build();
+    }
+
+    /*
+     * Parse the version part from the path.
+     * The absolute path is
+     * http://<host>:<port>/pxf/<version>/<rest of path>
+     *
+     * path - the path part after /pxf/
+     * returns the first element after /pxf/
+     */
+    private String parseVersion(String path) {
+
+        int slash = path.indexOf('/');
+        if (slash == -1) {
+            return path;
+        }
+
+        return path.substring(0, slash);
+    }
+}
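
For clarity, a few worked inputs for parseVersion above (the harness class is hypothetical):

    public class ParseVersionSketch {
        public static void main(String[] args) {
            // Same logic as parseVersion:
            //   "v13/Bridge"        -> "v13"
            //   "v2/Analyzer/stats" -> "v2"
            //   "v13"               -> "v13"  (no '/', so the whole path is returned)
            for (String path : new String[]{"v13/Bridge", "v2/Analyzer/stats", "v13"}) {
                int slash = path.indexOf('/');
                String version = (slash == -1) ? path : path.substring(0, slash);
                System.out.println(path + " -> " + version);
            }
        }
    }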

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
new file mode 100644
index 0000000..42a2466
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/MetadataResource.java
@@ -0,0 +1,91 @@
+package org.apache.hawq.pxf.service.rest;
+
+import java.io.IOException;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.catalina.connector.ClientAbortException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.MetadataFetcher;
+import org.apache.hawq.pxf.service.MetadataFetcherFactory;
+import org.apache.hawq.pxf.service.MetadataResponseFormatter;
+
+/**
+ * Class enhances the API of the WEBHDFS REST server.
+ * Returns the metadata of a given hcatalog table.
+ * <br>
+ * Example of querying the Metadata API from a web client:<br>
+ * <code>curl -i "http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1"</code><br>
+ * /pxf/ is made part of the path when there is a webapp by that name in tomcat.
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/Metadata/")
+public class MetadataResource extends RestResource {
+    private Log Log;
+
+    public MetadataResource() throws IOException {
+        Log = LogFactory.getLog(MetadataResource.class);
+    }
+
+    /**
+     * This function queries the HiveMetaStore to get the given table's metadata:
+     * Table name, field names, field types.
+     * The types are converted from HCatalog types to HAWQ types.
+     * Supported HCatalog types:
+     * TINYINT, SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE,
+     * STRING, BINARY, TIMESTAMP, DATE, DECIMAL, VARCHAR, CHAR.
+     * <br>
+     * Unsupported types result in an error.
+     * <br>
+     * Response Examples:<br>
+     * For a table <code>default.t1</code> with 2 fields (a int, b float), the returned metadata will be:
+     *      <code>{"PXFMetadata":[{"table":{"dbName":"default","tableName":"t1"},"fields":[{"name":"a","type":"int"},{"name":"b","type":"float"}]}]}</code>
+     *
+     * @param servletContext servlet context
+     * @param headers http headers
+     * @param table HCatalog table name
+     * @return JSON formatted response with metadata for given table
+     * @throws Exception if the connection to HCatalog failed, the table doesn't exist, or its type or fields are not supported
+     */
+    @GET
+    @Path("getTableMetadata")
+    @Produces("application/json")
+    public Response read(@Context final ServletContext servletContext,
+                         @Context final HttpHeaders headers,
+                         @QueryParam("table") final String table) throws Exception {
+        Log.debug("getTableMetadata started");
+        String jsonOutput;
+        try {
+            // 1. start MetadataFetcher
+            MetadataFetcher metadataFetcher =
+                    MetadataFetcherFactory.create("org.apache.hawq.pxf.plugins.hive.HiveMetadataFetcher"); //TODO: nhorn - 09-03-15 - pass as param
+
+            // 2. get Metadata
+            Metadata metadata = metadataFetcher.getTableMetadata(table);
+
+            // 3. serialize to JSON
+            jsonOutput = MetadataResponseFormatter.formatResponseString(metadata);
+
+            Log.debug("getTableMetadata output: " + jsonOutput);
+
+        } catch (ClientAbortException e) {
+            Log.error("Remote connection closed by HAWQ", e);
+            throw e;
+        } catch (java.io.IOException e) {
+            Log.error("Unhandled exception thrown", e);
+            throw e;
+        }
+
+        return Response.ok(jsonOutput, MediaType.APPLICATION_JSON_TYPE).build();
+    }
+}
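
The getTableMetadata endpoint can also be exercised with nothing but the JDK. A minimal sketch, with host, port and table name taken from the curl example in the comment above (the class name is hypothetical):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class MetadataClientSketch {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:51200/pxf/v13/Metadata/getTableMetadata?table=t1");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    // Expected shape: {"PXFMetadata":[{"table":{...},"fields":[...]}]}
                    System.out.println(line);
                }
            } finally {
                conn.disconnect();
            }
        }
    }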

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
new file mode 100644
index 0000000..e98607d
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/RestResource.java
@@ -0,0 +1,48 @@
+package org.apache.hawq.pxf.service.rest;
+
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.apache.commons.codec.CharEncoding;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Superclass of all PXF REST classes
+ */
+public abstract class RestResource {
+
+    private static Log Log = LogFactory.getLog(RestResource.class);
+
+    /**
+     * Converts the request headers multivalued map to a case-insensitive regular map
+     * by taking only the first value of each header and storing it in a CASE_INSENSITIVE_ORDER TreeMap.
+     * All values are converted from ISO_8859_1 (ISO-LATIN-1) to UTF_8.
+     *
+     * @param requestHeaders request headers multi map.
+     * @return a regular case-insensitive map.
+     * @throws UnsupportedEncodingException if the named charsets ISO_8859_1 and UTF_8 are not supported
+     */
+    public Map<String, String> convertToCaseInsensitiveMap(MultivaluedMap<String, String> requestHeaders)
+            throws UnsupportedEncodingException {
+        Map<String, String> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+        for (Map.Entry<String, List<String>> entry : requestHeaders.entrySet()) {
+            String key = entry.getKey();
+            List<String> values = entry.getValue();
+            if (values != null) {
+                String value = values.get(0);
+                if (value != null) {
+                    // converting to value UTF-8 encoding
+                    value = new String(value.getBytes(CharEncoding.ISO_8859_1), CharEncoding.UTF_8);
+                    Log.trace("key: " + key + ". value: " + value);
+                    result.put(key, value.replace("\\\"", "\""));
+                }
+            }
+        }
+        return result;
+    }
+}
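
A small usage sketch of convertToCaseInsensitiveMap, assuming a JAX-RS 2.0 API jar on the classpath (for MultivaluedHashMap); the anonymous RestResource subclass and the header values are purely illustrative:

    import java.util.Map;

    import javax.ws.rs.core.MultivaluedHashMap;
    import javax.ws.rs.core.MultivaluedMap;

    import org.apache.hawq.pxf.service.rest.RestResource;

    public class HeaderMapSketch {
        public static void main(String[] args) throws Exception {
            MultivaluedMap<String, String> headers = new MultivaluedHashMap<>();
            headers.add("X-GP-DATA-DIR", "/data/dir");
            headers.add("x-gp-format", "TEXT");

            // RestResource is abstract but has no abstract methods, so a trivial subclass suffices here.
            Map<String, String> params = new RestResource() {}.convertToCaseInsensitiveMap(headers);

            // Lookups are case-insensitive thanks to the CASE_INSENSITIVE_ORDER TreeMap.
            System.out.println(params.get("x-gp-data-dir")); // /data/dir
            System.out.println(params.get("X-GP-FORMAT"));   // TEXT
        }
    }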

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
new file mode 100644
index 0000000..86bf354
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/ServletLifecycleListener.java
@@ -0,0 +1,43 @@
+package org.apache.hawq.pxf.service.rest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import javax.servlet.ServletContextListener;
+import javax.servlet.ServletContextEvent;
+
+import org.apache.hawq.pxf.service.utilities.Log4jConfigure;
+import org.apache.hawq.pxf.service.utilities.SecureLogin;
+
+/** 
+ * Listener on lifecycle events of our webapp
+ */
+public class ServletLifecycleListener implements ServletContextListener {
+
+    private static Log LOG = LogFactory.getLog(ServletContextListener.class);
+
+	/**
+	 * Called after the webapp has been initialized.
+	 * 
+	 * 1. Initializes log4j.
+	 * 2. Initiates a Kerberos login when Hadoop security is on.
+	 */
+	@Override
+	public void contextInitialized(ServletContextEvent event) {	
+		// 1. Initialize log4j:
+		Log4jConfigure.configure(event);
+		
+		LOG.info("webapp initialized");
+		
+		// 2. Initiate secure login
+		SecureLogin.login();
+	}
+
+	/**
+	 * Called before the webapp is about to go down
+	 */
+	@Override
+	public void contextDestroyed(ServletContextEvent event) {
+		LOG.info("webapp about to go down");
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/WritableResource.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/WritableResource.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/WritableResource.java
new file mode 100644
index 0000000..f059eec
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/rest/WritableResource.java
@@ -0,0 +1,151 @@
+package org.apache.hawq.pxf.service.rest;
+
+import java.io.DataInputStream;
+import java.io.InputStream;
+import java.util.Map;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.catalina.connector.ClientAbortException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hawq.pxf.service.Bridge;
+import org.apache.hawq.pxf.service.WriteBridge;
+import org.apache.hawq.pxf.service.utilities.ProtocolData;
+import org.apache.hawq.pxf.service.utilities.SecuredHDFS;
+
+/*
+ * Running this resource manually:
+ *
+ * run: 
+ 	curl -i -X post "http://localhost:50070/pxf/v5w/Writable/stream?path=/data/curl/curl`date \"+%h%d_%H%M%s\"`" \
+ 	--header "X-GP-Accessor: TextFileWAccessor" \
+ 	--header "X-GP-Resolver: TextWResolver" \
+ 	--header "Content-Type:application/octet-stream" \
+ 	--header "Expect: 100-continue" \
+  	--header "X-GP-ALIGNMENT: 4" \
+ 	--header "X-GP-SEGMENT-ID: 0" \
+ 	--header "X-GP-SEGMENT-COUNT: 3" \
+ 	--header "X-GP-HAS-FILTER: 0" \
+ 	--header "X-GP-FORMAT: TEXT" \
+ 	--header "X-GP-URI: pxf://localhost:50070/data/curl/?Accessor=TextFileWAccessor&Resolver=TextWResolver" \
+ 	--header "X-GP-URL-HOST: localhost" \
+ 	--header "X-GP-URL-PORT: 50070" \
+ 	--header "X-GP-ATTRS: 0" \
+ 	--header "X-GP-DATA-DIR: data/curl/" \
+ 	  -d "data111" -d "data222"
+
+ * 	result:
+
+  	HTTP/1.1 200 OK
+	Content-Type: text/plain;charset=UTF-8
+	Content-Type: text/plain
+	Transfer-Encoding: chunked
+	Server: Jetty(7.6.10.v20130312)
+
+	wrote 15 bytes to curlAug11_17271376231245
+
+	file content:
+	bin/hdfs dfs -cat /data/curl/*45 
+	data111&data222
+
+ */
+
+
+/*
+ * This class handles the subpath /<version>/Writable/ of this
+ * REST component
+ */
+@Path("/" + Version.PXF_PROTOCOL_VERSION + "/Writable/")
+public class WritableResource extends RestResource {
+    private static final Log LOG = LogFactory.getLog(WritableResource.class);
+
+    public WritableResource() {
+    }
+
+    /*
+     * This function is called when http://nn:port/pxf/vx/Writable/stream?path=...
+     * is used.
+     *
+     * @param servletContext Servlet context contains attributes required by SecuredHDFS
+     * @param headers Holds HTTP headers from request
+     * @param path Holds URI path option used in this request
+     * @param inputStream stream of bytes to write from Hawq
+     */
+    @POST
+    @Path("stream")
+    @Consumes(MediaType.APPLICATION_OCTET_STREAM)
+    public Response stream(@Context final ServletContext servletContext,
+                           @Context HttpHeaders headers,
+                           @QueryParam("path") String path,
+                           InputStream inputStream) throws Exception {
+
+        /* Convert headers into a case-insensitive regular map */
+        Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("WritableResource started with parameters: " + params + " and write path: " + path);
+        }
+
+        ProtocolData protData = new ProtocolData(params);
+        protData.setDataSource(path);
+        
+        SecuredHDFS.verifyToken(protData, servletContext);
+        Bridge bridge = new WriteBridge(protData);
+
+        // THREAD-SAFE parameter has precedence
+        boolean isThreadSafe = protData.isThreadSafe() && bridge.isThreadSafe();
+        LOG.debug("Request for " + path + " handled " +
+                (isThreadSafe ? "without" : "with") + " synchronization");
+
+        return isThreadSafe ?
+                writeResponse(bridge, path, inputStream) :
+                synchronizedWriteResponse(bridge, path, inputStream);
+    }
+
+    private static synchronized Response synchronizedWriteResponse(Bridge bridge,
+                                                                   String path,
+                                                                   InputStream inputStream)
+            throws Exception {
+        return writeResponse(bridge, path, inputStream);
+    }
+
+    private static Response writeResponse(Bridge bridge,
+                                          String path,
+                                          InputStream inputStream) throws Exception {
+
+        String returnMsg;
+
+        // Open the output file
+        bridge.beginIteration();
+
+        DataInputStream dataStream = new DataInputStream(inputStream);
+
+        long totalWritten = 0;
+
+        try {
+            while (bridge.setNext(dataStream)) {
+                ++totalWritten;
+            }
+        } catch (ClientAbortException e) {
+            LOG.debug("Remote connection closed by HAWQ", e);
+        } catch (Exception ex) {
+            LOG.debug("totalWritten so far " + totalWritten + " to " + path);
+            throw ex;
+        } finally {
+            inputStream.close();
+        }
+        returnMsg = "wrote " + totalWritten + " bulks to " + path;
+        LOG.debug(returnMsg);
+
+        return Response.ok(returnMsg).build();
+    }
+}
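
A JDK-only sketch of pushing bytes at the Writable stream endpoint. The path, port and the two X-GP-* headers shown are an illustrative subset of the full header set from the curl example above (ultimately parsed by ProtocolData); the class name is hypothetical:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class WritableClientSketch {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:50070/pxf/v13/Writable/stream?path=/data/curl/sketch");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setDoOutput(true);
            conn.setRequestProperty("Content-Type", "application/octet-stream");
            conn.setRequestProperty("X-GP-SEGMENT-ID", "0");
            conn.setRequestProperty("X-GP-SEGMENT-COUNT", "1");
            try (OutputStream out = conn.getOutputStream()) {
                out.write("data111\n".getBytes("UTF-8"));
            }
            System.out.println("HTTP " + conn.getResponseCode());
            conn.disconnect();
        }
    }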

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/CustomWebappLoader.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/CustomWebappLoader.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/CustomWebappLoader.java
new file mode 100644
index 0000000..e8ceb25
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/CustomWebappLoader.java
@@ -0,0 +1,211 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+
+import org.apache.catalina.LifecycleException;
+import org.apache.catalina.loader.WebappLoader;
+import org.apache.juli.logging.Log;
+import org.apache.juli.logging.LogFactory;
+
+/**
+ * A WebappLoader that allows a customized classpath to be added through configuration in context xml.
+ * Any additional classpath entry will be added to the default webapp classpath.
+ *
+ * <pre>
+ * &lt;Context&gt;
+ *   &lt;Loader className="org.apache.hawq.pxf.service.utilities.CustomWebappLoader"
+ *              classpathFiles="/somedir/classpathFile1;/somedir/classpathFile2"/&gt;
+ * &lt;/Context&gt;
+ * </pre>
+ */
+public class CustomWebappLoader extends WebappLoader {
+
+	/**
+	 * Because this class belongs in tcServer itself, logs go into tcServer's log facility that is separate
+	 * from the web app's log facility.
+	 *
+	 * Logs are directed to catalina.log file. By default only INFO or higher messages are logged.
+	 * To change log level, add the following line to {catalina.base}/conf/logging.properties
+	 * <code>org.apache.hawq.pxf.level = FINE/INFO/WARNING</code> (FINE = debug).
+	 */
+	private static final Log LOG = LogFactory.getLog(CustomWebappLoader.class);
+
+	/**
+	 * Classpath files containing path entries, separated by new line.
+	 * Globbing is supported for the file name.
+	 * e.g:
+	 * somedir
+	 * anotherdir/somejar.jar
+	 * anotherone/hadoop*.jar
+	 * anotherone/pxf*[0-9].jar
+	 * Unix wildcard convention can be used to match a number of files
+	 * (e.g. <code>*</code>, <code>[0-9]</code>, <code>?</code>), but not a number of directories.
+	 *
+	 * The files specified under classpathFiles must exist - if they can't be read an exception will be thrown.
+	 */
+	private String classpathFiles;
+	/**
+	 * Secondary classpath files - if these files are unavailable only a warning will be logged.
+	 */
+	private String secondaryClasspathFiles;
+
+	/**
+	 * Constructs a WebappLoader with no defined parent class loader (actual parent will be the system class loader).
+	 */
+	public CustomWebappLoader() {
+		super();
+	}
+
+	/**
+	 * Constructs a WebappLoader with the specified class loader to be defined as the parent for this ClassLoader.
+	 *
+	 * @param parent The parent class loader
+	 */
+	public CustomWebappLoader(ClassLoader parent) {
+		super(parent);
+	}
+
+	/**
+	 * <code>classpathFiles</code> attribute is automatically set from the context xml file.
+	 *
+	 * @param classpathFiles <code>;</code>-separated list of classpath files, each containing classpath entries separated by new lines.
+	 */
+	public void setClasspathFiles(String classpathFiles) {
+		this.classpathFiles = classpathFiles;
+	}
+
+	/**
+	 * <code>secondaryClasspathFiles</code> attribute is automatically set from the context xml file.
+	 *
+	 * @param secondaryClasspathFiles <code>;</code>-separated list of classpath files, each containing classpath entries separated by new lines.
+	 */
+	public void setSecondaryClasspathFiles(String secondaryClasspathFiles) {
+		this.secondaryClasspathFiles = secondaryClasspathFiles;
+	}
+
+	/**
+	 * Implements {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
+	 *
+	 * @throws LifecycleException if this component detects a fatal error that prevents this component from being used.
+	 */
+	@Override
+	protected void startInternal() throws LifecycleException {
+
+		addRepositories(classpathFiles, true);
+		addRepositories(secondaryClasspathFiles, false);
+
+		super.startInternal();
+	}
+
+	private void addRepositories(String classpathFiles, boolean throwException) throws LifecycleException {
+
+		for (String classpathFile : classpathFiles.split(";")) {
+
+			String classpath = readClasspathFile(classpathFile, throwException);
+			if (classpath == null) {
+				continue;
+			}
+
+			ArrayList<String> classpathEntries = trimEntries(classpath.split("[\\r\\n]+"));
+			LOG.info("Classpath file " + classpathFile + " has " + classpathEntries.size() + " entries");
+
+			for (String entry : classpathEntries) {
+				LOG.debug("Trying to load entry " + entry);
+				int repositoriesCount = 0;
+				Path pathEntry = Paths.get(entry);
+				/*
+				 * For each entry, we look at the parent directory and try to match each of the files
+				 * inside it to the file name or pattern in the file name (the last part of the path).
+				 * E.g., for path '/some/path/with/pattern*', the parent directory will be '/some/path/with/'
+				 * and the file name will be 'pattern*'. Each file under that directory matching
+				 * this pattern will be added to the class loader repository.
+				 */
+				try (DirectoryStream<Path> repositories = Files.newDirectoryStream(pathEntry.getParent(),
+						pathEntry.getFileName().toString())) {
+					for (Path repository : repositories) {
+						if (addPathToRepository(repository, entry)) {
+							repositoriesCount++;
+						}
+					}
+				} catch (IOException e) {
+					LOG.warn("Failed to load entry " + entry + ": " + e);
+				}
+				if (repositoriesCount == 0) {
+					LOG.warn("Entry " + entry + " doesn't match any files");
+				}
+				LOG.debug("Loaded " + repositoriesCount + " repositories from entry " + entry);
+			}
+		}
+	}
+
+	private String readClasspathFile(String classpathFile, boolean throwException) throws LifecycleException {
+		String classpath = null;
+		try {
+			LOG.info("Trying to read classpath file " + classpathFile);
+			classpath = new String(Files.readAllBytes(Paths.get(classpathFile)));
+		} catch (IOException ioe) {
+			LOG.warn("Failed to read classpath file: " + ioe);
+			if (throwException) {
+				throw new LifecycleException("Failed to read classpath file: " + ioe, ioe);
+			}
+		}
+		return classpath;
+	}
+
+	/**
+	 * Returns a list of valid classpath entries, excluding null, empty and comment lines.
+	 * @param classpathEntries original entries
+	 * @return valid entries
+	 */
+	private ArrayList<String> trimEntries(String[] classpathEntries) {
+
+		ArrayList<String> trimmed = new ArrayList<String>();
+		int line = 0;
+		for (String entry : classpathEntries) {
+
+			line++;
+			if (entry == null) {
+				LOG.debug("Skipping entry #" + line + " (null)");
+				continue;
+			}
+
+			entry = entry.trim();
+			if (entry.isEmpty() || entry.startsWith("#")) {
+				LOG.debug("Skipping entry #" + line + " (" + entry + ")");
+				continue;
+			}
+			trimmed.add(entry);
+		}
+		return trimmed;
+	}
+
+	private boolean addPathToRepository(Path path, String entry) {
+
+		try {
+			URI pathUri = path.toUri();
+			String pathUriStr = pathUri.toString();
+			File file = new File(pathUri);
+			if (!file.canRead()) {
+				throw new FileNotFoundException(pathUriStr + " cannot be read");
+			}
+			addRepository(pathUriStr);
+			LOG.debug("Repository " + pathUriStr + " added from entry " + entry);
+			return true;
+		} catch (Exception e) {
+			LOG.warn("Failed to load path " + path + " to repository: " + e);
+		}
+
+		return false;
+	}
+
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
new file mode 100644
index 0000000..9e3589a
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Log4jConfigure.java
@@ -0,0 +1,45 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import java.io.File;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletContextEvent;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.PropertyConfigurator;
+
+public class Log4jConfigure {
+
+    private static Log LOG = LogFactory.getLog(Log4jConfigure.class);
+	
+    /**
+     * Initializes log4j logging for the webapp.
+     * 
+     * Reads log4j properties file location from log4jConfigLocation parameter in web.xml.
+     * When not using an absolute path, the path starts from the webapp root directory.
+     * If the file can't be read, reverts to the default configuration file under
+     * WEB-INF/classes/pxf-log4j.properties.
+     *
+     * @param event Servlet context, used to determine webapp root directory.
+     */
+	public static void configure(ServletContextEvent event) {
+		
+		final String defaultLog4jLocation = "WEB-INF/classes/pxf-log4j.properties";
+
+		ServletContext context = event.getServletContext();
+        String log4jConfigLocation = context.getInitParameter("log4jConfigLocation");
+
+        if (!log4jConfigLocation.startsWith(File.separator)) {
+        	log4jConfigLocation = context.getRealPath("") + File.separator + log4jConfigLocation;
+        }
+        
+        // revert to default properties file if file doesn't exist
+        File log4jConfigFile = new File (log4jConfigLocation);
+        if (!log4jConfigFile.canRead()) {
+        	log4jConfigLocation = context.getRealPath("") + File.separator + defaultLog4jLocation;
+        }
+		PropertyConfigurator.configure(log4jConfigLocation); 
+        LOG.info("log4jConfigLocation = " + log4jConfigLocation);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
new file mode 100644
index 0000000..06ff72b
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
@@ -0,0 +1,374 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import org.apache.hawq.pxf.api.OutputFormat;
+import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
+import org.apache.hawq.pxf.api.utilities.InputData;
+import org.apache.hawq.pxf.api.utilities.ProfilesConf;
+
+/**
+ * Common configuration of all MetaData classes. Provides read-only access to
+ * common parameters supplied using system properties.
+ */
+public class ProtocolData extends InputData {
+
+    private static final String TRUE_LCASE = "true";
+    private static final String FALSE_LCASE = "false";
+    private static final String PROP_PREFIX = "X-GP-";
+    public static final int INVALID_SPLIT_IDX = -1;
+
+    private static final Log LOG = LogFactory.getLog(ProtocolData.class);
+
+    protected OutputFormat outputFormat;
+    protected int port;
+    protected String host;
+    protected String profile;
+    protected String token;
+
+    /**
+     * Constructs a ProtocolData. Parses X-GP-* configuration variables.
+     *
+     * @param paramsMap contains all query-specific parameters from Hawq
+     */
+    public ProtocolData(Map<String, String> paramsMap) {
+
+        requestParametersMap = paramsMap;
+        segmentId = getIntProperty("SEGMENT-ID");
+        totalSegments = getIntProperty("SEGMENT-COUNT");
+        filterStringValid = getBoolProperty("HAS-FILTER");
+
+        if (filterStringValid) {
+            filterString = getProperty("FILTER");
+        }
+
+        parseFormat(getProperty("FORMAT"));
+
+        host = getProperty("URL-HOST");
+        port = getIntProperty("URL-PORT");
+
+        tupleDescription = new ArrayList<ColumnDescriptor>();
+        recordkeyColumn = null;
+        parseTupleDescription();
+
+        /*
+         * accessor - will throw exception from getProperty() if outputFormat is
+         * BINARY and the user did not supply accessor=... or profile=...
+         * resolver - will throw exception from getProperty() if outputFormat is
+         * BINARY and the user did not supply resolver=... or profile=...
+         */
+        profile = getOptionalProperty("PROFILE");
+        if (profile != null) {
+            setProfilePlugins();
+        }
+        accessor = getProperty("ACCESSOR");
+        resolver = getProperty("RESOLVER");
+        analyzer = getOptionalProperty("ANALYZER");
+        fragmenter = getOptionalProperty("FRAGMENTER");
+        dataSource = getProperty("DATA-DIR");
+
+        /* Kerberos token information */
+        if (UserGroupInformation.isSecurityEnabled()) {
+            token = getProperty("TOKEN");
+        }
+
+        parseFragmentMetadata();
+        parseUserData();
+        parseThreadSafe();
+        parseRemoteCredentials();
+
+        dataFragment = INVALID_SPLIT_IDX;
+        parseDataFragment(getOptionalProperty("DATA-FRAGMENT"));
+
+        // Store alignment for global use as a system property
+        System.setProperty("greenplum.alignment", getProperty("ALIGNMENT"));
+    }
+
+    /**
+     * Constructs a ProtocolData from a copy. Used to create from an
+     * extending class.
+     *
+     * @param copy the input data to copy
+     */
+    public ProtocolData(ProtocolData copy) {
+        this.requestParametersMap = copy.requestParametersMap;
+        this.segmentId = copy.segmentId;
+        this.totalSegments = copy.totalSegments;
+        this.outputFormat = copy.outputFormat;
+        this.host = copy.host;
+        this.port = copy.port;
+        this.fragmentMetadata = copy.fragmentMetadata;
+        this.userData = copy.userData;
+        this.tupleDescription = copy.tupleDescription;
+        this.recordkeyColumn = copy.recordkeyColumn;
+        this.filterStringValid = copy.filterStringValid;
+        this.filterString = copy.filterString;
+        this.dataSource = copy.dataSource;
+        this.accessor = copy.accessor;
+        this.resolver = copy.resolver;
+        this.fragmenter = copy.fragmenter;
+        this.analyzer = copy.analyzer;
+        this.threadSafe = copy.threadSafe;
+        this.remoteLogin = copy.remoteLogin;
+        this.remoteSecret = copy.remoteSecret;
+        this.token = copy.token;
+    }
+
+    public String getToken() {
+        return token;
+    }
+
+    /**
+     * Sets the requested profile plugins from profile file into
+     * {@link #requestParametersMap}.
+     */
+    private void setProfilePlugins() {
+        Map<String, String> pluginsMap = ProfilesConf.getProfilePluginsMap(profile);
+        checkForDuplicates(pluginsMap, requestParametersMap);
+        requestParametersMap.putAll(pluginsMap);
+    }
+
+    /**
+     * Verifies there are no duplicates between parameters declared in the table
+     * definition and parameters defined in a profile.
+     *
+     * The parameters' names are case insensitive.
+     */
+    private void checkForDuplicates(Map<String, String> plugins,
+                                    Map<String, String> params) {
+        List<String> duplicates = new ArrayList<>();
+        for (String key : plugins.keySet()) {
+            if (params.containsKey(key)) {
+                duplicates.add(key);
+            }
+        }
+
+        if (!duplicates.isEmpty()) {
+            throw new IllegalArgumentException("Profile '" + profile
+                    + "' already defines: "
+                    + String.valueOf(duplicates).replace("X-GP-", ""));
+        }
+    }
+
+    /**
+     * Returns the request parameters.
+     *
+     * @return map of request parameters
+     */
+    public Map<String, String> getParametersMap() {
+        return requestParametersMap;
+    }
+
+    /**
+     * Throws an exception when the given property value is missing in request.
+     *
+     * @param property missing property name
+     * @throws IllegalArgumentException throws an exception with the property
+     *             name in the error message
+     */
+    public void protocolViolation(String property) {
+        String error = "Internal server error. Property \"" + property
+                + "\" has no value in current request";
+
+        LOG.error(error);
+        throw new IllegalArgumentException(error);
+    }
+
+    /**
+     * Returns the value to which the specified property is mapped in
+     * {@link #requestParametersMap}.
+     *
+     * @param property the lookup property key
+     * @return the property value
+     * @throws IllegalArgumentException if the property key is missing
+     */
+    private String getProperty(String property) {
+        String result = requestParametersMap.get(PROP_PREFIX + property);
+
+        if (result == null) {
+            protocolViolation(property);
+        }
+
+        return result;
+    }
+
+    /**
+     * Returns an optional property value. Unlike {@link #getProperty}, it does
+     * not fail when the property is not found; it simply returns null.
+     *
+     * @param property the lookup property key
+     * @return property value as a String, or null if the property is missing
+     */
+    private String getOptionalProperty(String property) {
+        return requestParametersMap.get(PROP_PREFIX + property);
+    }
+
+    /**
+     * Returns a property value as an int type.
+     *
+     * @param property the lookup property
+     * @return property value as an int type
+     * @throws NumberFormatException if the value can't be represented by an
+     *             Integer
+     */
+    private int getIntProperty(String property) {
+        return Integer.parseInt(getProperty(property));
+    }
+
+    /**
+     * Returns a property value as a boolean. A boolean property is encoded as
+     * an int where 0 means false, and anything else means true (as in C). For
+     * example, "0" is parsed as false and "1" is parsed as true.
+     *
+     * @param property the lookup property
+     * @return property value as boolean
+     * @throws NumberFormatException if the value can't be represented by an
+     *             Integer
+     */
+    private boolean getBoolProperty(String property) {
+        return getIntProperty(property) != 0;
+    }
+
+    /**
+     * Returns the current output format, either {@link OutputFormat#TEXT} or
+     * {@link OutputFormat#BINARY}.
+     *
+     * @return output format
+     */
+    public OutputFormat outputFormat() {
+        return outputFormat;
+    }
+
+    /**
+     * Returns the server name providing the service.
+     *
+     * @return server name
+     */
+    public String serverName() {
+        return host;
+    }
+
+    /**
+     * Returns the server port providing the service.
+     *
+     * @return server port
+     */
+    public int serverPort() {
+        return port;
+    }
+
+    /**
+     * Sets the thread-safe parameter. Defaults to true when the THREAD-SAFE
+     * property is not set.
+     */
+    private void parseThreadSafe() {
+
+        threadSafe = true;
+        String threadSafeStr = getOptionalProperty("THREAD-SAFE");
+        if (threadSafeStr != null) {
+            threadSafe = parseBooleanValue(threadSafeStr);
+        }
+    }
+
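+    /**
+     * Parses a boolean value out of a string. Valid values are TRUE and FALSE,
+     * compared case-insensitively.
+     *
+     * @param threadSafeStr the string to parse
+     * @return the parsed boolean value
+     * @throws IllegalArgumentException if the string is not a valid boolean
+     */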
+    private boolean parseBooleanValue(String threadSafeStr) {
+
+        if (threadSafeStr.equalsIgnoreCase(TRUE_LCASE)) {
+            return true;
+        }
+        if (threadSafeStr.equalsIgnoreCase(FALSE_LCASE)) {
+            return false;
+        }
+        throw new IllegalArgumentException("Illegal boolean value '"
+                + threadSafeStr + "'." + " Usage: [TRUE|FALSE]");
+    }
+
+    /**
+     * Sets the format type based on the input string. Allowed values are:
+     * "TEXT", "GPDBWritable".
+     *
+     * @param formatString format string
+     */
+    protected void parseFormat(String formatString) {
+        switch (formatString) {
+            case "TEXT":
+                outputFormat = OutputFormat.TEXT;
+                break;
+            case "GPDBWritable":
+                outputFormat = OutputFormat.BINARY;
+                break;
+            default:
+                throw new IllegalArgumentException(
+                        "Wrong value for greenplum.format " + formatString);
+        }
+    }
+
+    /**
+     * Sets the tuple description for the record, and marks the record key
+     * column if one of the columns matches the record key name.
+     */
+    void parseTupleDescription() {
+        int columns = getIntProperty("ATTRS");
+        for (int i = 0; i < columns; ++i) {
+            String columnName = getProperty("ATTR-NAME" + i);
+            int columnTypeCode = getIntProperty("ATTR-TYPECODE" + i);
+            String columnTypeName = getProperty("ATTR-TYPENAME" + i);
+
+            ColumnDescriptor column = new ColumnDescriptor(columnName,
+                    columnTypeCode, i, columnTypeName);
+            tupleDescription.add(column);
+
+            if (columnName.equalsIgnoreCase(ColumnDescriptor.RECORD_KEY_NAME)) {
+                recordkeyColumn = column;
+            }
+        }
+    }
+
+    /**
+     * Sets the index of the allocated data fragment.
+     *
+     * @param fragment string holding the index of the allocated data fragment
+     */
+    protected void parseDataFragment(String fragment) {
+
+        /*
+         * Some resources don't require a fragment, hence the fragment
+         * property can be empty.
+         */
+        if (StringUtils.isEmpty(fragment)) {
+            return;
+        }
+        dataFragment = Integer.parseInt(fragment);
+    }
+
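+    /**
+     * Parses the request's optional Base64-encoded fragment metadata.
+     */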
+    private void parseFragmentMetadata() {
+        fragmentMetadata = parseBase64("FRAGMENT-METADATA",
+                "Fragment metadata information");
+    }
+
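+    /**
+     * Parses the request's optional Base64-encoded fragment user data.
+     */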
+    private void parseUserData() {
+        userData = parseBase64("FRAGMENT-USER-DATA", "Fragment user data");
+    }
+
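+    /**
+     * Decodes the value of an optional Base64-encoded property.
+     *
+     * @param key the lookup property key
+     * @param errName name of the property, used in error messages
+     * @return the decoded bytes, or null when the property is absent
+     * @throws IllegalArgumentException if the value is not valid Base64
+     */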
+    private byte[] parseBase64(String key, String errName) {
+        String encoded = getOptionalProperty(key);
+        if (encoded == null) {
+            return null;
+        }
+        if (!Base64.isArrayByteBase64(encoded.getBytes())) {
+            throw new IllegalArgumentException(errName
+                    + " must be Base64 encoded. (Bad value: " + encoded + ")");
+        }
+        byte[] parsed = Base64.decodeBase64(encoded);
+        LOG.debug("decoded " + key + ": " + new String(parsed));
+        return parsed;
+    }
+
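+    /**
+     * Stores the optional remote login and secret properties, when provided.
+     */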
+    private void parseRemoteCredentials() {
+        remoteLogin = getOptionalProperty("REMOTE-USER");
+        remoteSecret = getOptionalProperty("REMOTE-PASS");
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
new file mode 100644
index 0000000..5f30670
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecureLogin.java
@@ -0,0 +1,40 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.SecurityUtil;
+
+/*
+ * This class relies heavily on the Hadoop API to
+ * - check whether a secure login is required in Hadoop
+ * - parse and load the .xml configuration files
+ * - perform a Kerberos login with a keytab file
+ * - convert _HOST in the Kerberos principal to the current hostname
+ *
+ * It uses Hadoop Configuration to parse the XML configuration files,
+ * and Hadoop Security to modify the principal and perform the login.
+ *
+ * The major limitation of this class is its dependency on Hadoop. If Hadoop
+ * security is off, no login will be performed, regardless of the connector
+ * being used.
+ */
+public class SecureLogin {
+    private static Log LOG = LogFactory.getLog(SecureLogin.class);
+	private static final String CONFIG_KEY_SERVICE_KEYTAB = "pxf.service.kerberos.keytab";
+	private static final String CONFIG_KEY_SERVICE_PRINCIPAL = "pxf.service.kerberos.principal";
+
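+	/*
+	 * Performs a Kerberos login for the PXF service, using the principal and
+	 * keytab configured in pxf-site.xml (pxf.service.kerberos.principal and
+	 * pxf.service.kerberos.keytab). Wraps any failure in a RuntimeException.
+	 */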
+	public static void login() {
+		try {
+			Configuration config = new Configuration();
+			config.addResource("pxf-site.xml");
+
+			SecurityUtil.login(config, CONFIG_KEY_SERVICE_KEYTAB, CONFIG_KEY_SERVICE_PRINCIPAL);
+		} catch (Exception e) {
+			LOG.error("PXF service login failed");
+			throw new RuntimeException(e);
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
new file mode 100644
index 0000000..a1d42d1
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/SecuredHDFS.java
@@ -0,0 +1,93 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+
+import javax.servlet.ServletContext;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+
+/*
+ * This class handles security functions for access to secured HDFS.
+ */
+public class SecuredHDFS {
+    private static final Log LOG = LogFactory.getLog(SecuredHDFS.class);
+
+    /*
+     * Extracts the token information from the request parameters and, when
+     * security is enabled, verifies the token with the NameNode.
+     *
+     * All token properties are deserialized from a string into a Token object.
+     *
+     * @param protData protocol data of the current request, holding the token
+     *
+     * @param context servlet context which contains the NameNode reference
+     *
+     * @throws SecurityException Thrown when authentication fails
+     */
+    public static void verifyToken(ProtocolData protData, ServletContext context) {
+        try {
+            if (UserGroupInformation.isSecurityEnabled()) {
+                Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
+                String tokenString = protData.getToken();
+                token.decodeFromUrlString(tokenString);
+
+                verifyToken(token.getIdentifier(), token.getPassword(),
+                        token.getKind(), token.getService(), context);
+            }
+        } catch (IOException e) {
+            throw new SecurityException("Failed to verify delegation token "
+                    + e, e);
+        }
+    }
+
+    /*
+     * Verifies the token with the NameNode, if available, and creates a
+     * UserGroupInformation for the token's user.
+     *
+     * Code in this function is copied from JspHelper.getTokenUGI
+     *
+     * @param identifier Delegation token identifier
+     *
+     * @param password Delegation token password
+     *
+     * @param kind Delegation token kind
+     *
+     * @param service Delegation token service
+     *
+     * @param servletContext Jetty servlet context which contains the NN address
+     *
+     * @throws SecurityException Thrown when authentication fails
+     */
+    private static void verifyToken(byte[] identifier, byte[] password,
+                                    Text kind, Text service,
+                                    ServletContext servletContext) {
+        try {
+            Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
+                    identifier, password, kind, service);
+
+            ByteArrayInputStream buf = new ByteArrayInputStream(
+                    token.getIdentifier());
+            DataInputStream in = new DataInputStream(buf);
+            DelegationTokenIdentifier id = new DelegationTokenIdentifier();
+            id.readFields(in);
+
+            final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(servletContext);
+            if (nn != null) {
+                nn.getNamesystem().verifyToken(id, token.getPassword());
+            }
+
+            UserGroupInformation userGroupInformation = id.getUser();
+            userGroupInformation.addToken(token);
+            LOG.debug("user " + userGroupInformation.getUserName() + " ("
+                    + userGroupInformation.getShortUserName()
+                    + ") authenticated");
+
+            // re-login if necessary
+            userGroupInformation.checkTGTAndReloginFromKeytab();
+        } catch (IOException e) {
+            throw new SecurityException("Failed to verify delegation token "
+                    + e, e);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
new file mode 100644
index 0000000..b29a863
--- /dev/null
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/Utilities.java
@@ -0,0 +1,100 @@
+package org.apache.hawq.pxf.service.utilities;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hawq.pxf.api.utilities.InputData;
+
+/**
+ * Utilities class exposes helper methods for PXF classes.
+ */
+public class Utilities {
+    private static final Log LOG = LogFactory.getLog(Utilities.class);
+
+    /**
+     * Creates an object using the class name. The named class has to be
+     * located in the webapp's CLASSPATH.
+     *
+     * @param confClass the class of the metaData used to initialize the
+     *            instance
+     * @param className name of the class to instantiate
+     * @param metaData input data used to initialize the class
+     * @return initialized instance of the given class
+     * @throws Exception if the class was not found in the classpath, did not
+     *             have the expected constructor, or failed to be instantiated
+     */
+    public static Object createAnyInstance(Class<?> confClass,
+                                           String className, InputData metaData)
+            throws Exception {
+        Class<?> cls = Class.forName(className);
+        Constructor<?> con = cls.getConstructor(confClass);
+        return instantiate(con, metaData);
+    }
+
+    /**
+     * Creates an object using the class name with its default constructor. The
+     * named class has to be located in the webapp's CLASSPATH.
+     *
+     * @param className name of the class to instantiate
+     * @return initialized instance of the given class
+     * @throws Exception if the class was not found in the classpath, did not
+     *             have the expected constructor, or failed to be instantiated
+     */
+    public static Object createAnyInstance(String className) throws Exception {
+        Class<?> cls = Class.forName(className);
+        Constructor<?> con = cls.getConstructor();
+        return instantiate(con);
+    }
+
+    private static Object instantiate(Constructor<?> con, Object... args)
+            throws Exception {
+        try {
+            return con.newInstance(args);
+        } catch (InvocationTargetException e) {
+            /*
+             * We create resolvers, accessors, fragmenters, etc. using the
+             * reflection framework. If, for example, a resolver throws an
+             * exception during its instantiation (in its constructor), that
+             * exception reaches the reflection layer, where it is wrapped
+             * inside an InvocationTargetException. Here we are above the
+             * reflection layer, so we unwrap the resolver's original exception
+             * and throw it instead of the wrapping InvocationTargetException,
+             * so that the original exception text is displayed in psql instead
+             * of the message "Internal Server Error".
+             */
+            throw (e.getCause() != null) ? new Exception(e.getCause()) : e;
+        }
+    }
+
+    /**
+     * Transforms a byte array into a string of octal codes in the form
+     * \\xyz\\xyz.
+     *
+     * Each character is double escaped because PostgreSQL bytea requires it
+     * for some bytes: at a minimum, all non-printables, backslash, null and
+     * single quote. It is easier to just escape everything; see
+     * http://www.postgresql.org/docs/9.0/static/datatype-binary.html
+     *
+     * Octal codes must be padded to 3 characters (001, 012). For example, a
+     * newline byte (10) is appended as \\012.
+     *
+     * @param bytes bytes to escape
+     * @param sb StringBuilder to receive the octal codes of the given bytes
+     */
+    public static void byteArrayToOctalString(byte[] bytes, StringBuilder sb) {
+        if ((bytes == null) || (sb == null)) {
+            return;
+        }
+
+        sb.ensureCapacity(sb.length()
+                + (bytes.length * 5 /* characters per byte */));
+        for (int b : bytes) {
+            sb.append(String.format("\\\\%03o", b & 0xff));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/resources/pxf-log4j.properties
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/resources/pxf-log4j.properties b/pxf/pxf-service/src/main/resources/pxf-log4j.properties
index cd30dc4..b8a213e 100644
--- a/pxf/pxf-service/src/main/resources/pxf-log4j.properties
+++ b/pxf/pxf-service/src/main/resources/pxf-log4j.properties
@@ -2,7 +2,7 @@
 log4j.rootLogger=INFO, ROLLINGFILE
 
 # uncomment to see DEBUG messages from PXF service
-#log4j.logger.com.pivotal.pxf=DEBUG
+#log4j.logger.org.apache.hawq.pxf=DEBUG
 
 log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender
 # use the same value directory as set for $PXF_LOGDIR

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
index 13aad87..6952e5d 100644
--- a/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
+++ b/pxf/pxf-service/src/main/resources/pxf-profiles-default.xml
@@ -17,18 +17,18 @@
         <name>HBase</name>
         <description>This profile is suitable for using when connecting to an HBase data store engine</description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hbase.HBaseDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hbase.HBaseAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hbase.HBaseResolver</resolver>
+            <fragmenter>org.apache.hawq.pxf.plugins.hbase.HBaseDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hbase.HBaseAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hbase.HBaseResolver</resolver>
         </plugins>
     </profile>
     <profile>
         <name>Hive</name>
         <description>This profile is suitable for using when connecting to Hive</description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hive.HiveDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hive.HiveAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hive.HiveResolver</resolver>
+            <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hive.HiveAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hive.HiveResolver</resolver>
         </plugins>
     </profile>
     <profile>
@@ -39,9 +39,9 @@
             DELIMITER parameter is mandatory.
         </description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hive.HiveRCFileAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hive.HiveColumnarSerdeResolver</resolver>
+            <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hive.HiveRCFileAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hive.HiveColumnarSerdeResolver</resolver>
         </plugins>
     </profile>
     <profile>
@@ -51,9 +51,9 @@
             DELIMITER parameter is mandatory.
         </description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hive.HiveLineBreakAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hive.HiveStringPassResolver</resolver>
+            <fragmenter>org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hive.HiveLineBreakAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hive.HiveStringPassResolver</resolver>
         </plugins>
     </profile>
     <profile>
@@ -62,10 +62,10 @@
             on HDFS
         </description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hdfs.LineBreakAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hdfs.StringPassResolver</resolver>
-            <analyzer>com.pivotal.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
+            <fragmenter>org.apache.hawq.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hdfs.LineBreakAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hdfs.StringPassResolver</resolver>
+            <analyzer>org.apache.hawq.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
         </plugins>
     </profile>
     <profile>
@@ -74,20 +74,20 @@
             linefeeds) from plain text files on HDFS. It is not splittable (non parallel) and slower than HdfsTextSimple.
         </description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hdfs.QuotedLineBreakAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hdfs.StringPassResolver</resolver>
-            <analyzer>com.pivotal.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
+            <fragmenter>org.apache.hawq.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hdfs.QuotedLineBreakAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hdfs.StringPassResolver</resolver>
+            <analyzer>org.apache.hawq.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
         </plugins>
     </profile>
     <profile>
         <name>Avro</name>
         <description>This profile is suitable for using when reading Avro files (i.e fileName.avro)</description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hdfs.AvroFileAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hdfs.AvroResolver</resolver>
-            <analyzer>com.pivotal.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
+            <fragmenter>org.apache.hawq.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hdfs.AvroFileAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hdfs.AvroResolver</resolver>
+            <analyzer>org.apache.hawq.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
         </plugins>
     </profile>
     <profile>
@@ -97,19 +97,19 @@
             usage: pxf://nn:50070/path/to/file?profile=SequenceWritable&amp;data-schema=CustomClass
         </description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.hdfs.WritableResolver</resolver>
-            <analyzer>com.pivotal.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
+            <fragmenter>org.apache.hawq.pxf.plugins.hdfs.HdfsDataFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.hdfs.SequenceFileAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.hdfs.WritableResolver</resolver>
+            <analyzer>org.apache.hawq.pxf.plugins.hdfs.HdfsAnalyzer</analyzer>
         </plugins>
     </profile>
     <profile>
         <name>GemFireXD</name>
         <description>This profile is suitable for using when connecting to GemFireXD</description>
         <plugins>
-            <fragmenter>com.pivotal.pxf.plugins.gemfirexd.GemFireXDFragmenter</fragmenter>
-            <accessor>com.pivotal.pxf.plugins.gemfirexd.GemFireXDAccessor</accessor>
-            <resolver>com.pivotal.pxf.plugins.gemfirexd.GemFireXDResolver</resolver>
+            <fragmenter>org.apache.hawq.pxf.plugins.gemfirexd.GemFireXDFragmenter</fragmenter>
+            <accessor>org.apache.hawq.pxf.plugins.gemfirexd.GemFireXDAccessor</accessor>
+            <resolver>org.apache.hawq.pxf.plugins.gemfirexd.GemFireXDResolver</resolver>
         </plugins>
     </profile>
 </profiles>

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/webapp/META-INF/context.xml
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/webapp/META-INF/context.xml b/pxf/pxf-service/src/main/webapp/META-INF/context.xml
index 81dbe3d..2877f3c 100644
--- a/pxf/pxf-service/src/main/webapp/META-INF/context.xml
+++ b/pxf/pxf-service/src/main/webapp/META-INF/context.xml
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <Context>
-     <Loader className="com.pivotal.pxf.service.utilities.CustomWebappLoader" 
+     <Loader className="org.apache.hawq.pxf.service.utilities.CustomWebappLoader"
              classpathFiles="/etc/pxf/conf/pxf-private.classpath"
              secondaryClasspathFiles="/etc/pxf/conf/pxf-public.classpath"/>
 </Context>

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f053e053/pxf/pxf-service/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/webapp/WEB-INF/web.xml b/pxf/pxf-service/src/main/webapp/WEB-INF/web.xml
index f01e0d8..2ade5d1 100644
--- a/pxf/pxf-service/src/main/webapp/WEB-INF/web.xml
+++ b/pxf/pxf-service/src/main/webapp/WEB-INF/web.xml
@@ -26,7 +26,7 @@
         </servlet-class>
         <init-param>
             <param-name>com.sun.jersey.config.property.packages</param-name>
-            <param-value>com.pivotal.pxf.service.rest</param-value>
+            <param-value>org.apache.hawq.pxf.service.rest</param-value>
         </init-param>
         <init-param>
             <param-name>jersey.config.server.provider.scanning.recursive</param-name>
@@ -39,7 +39,7 @@
         <url-pattern>/*</url-pattern>
     </servlet-mapping>
     <listener>
-	   <listener-class>com.pivotal.pxf.service.rest.ServletLifecycleListener</listener-class>
+	   <listener-class>org.apache.hawq.pxf.service.rest.ServletLifecycleListener</listener-class>
     </listener>
 
     <!-- log4j configuration