Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2016/03/19 22:05:48 UTC

hadoop git commit: HDFS-10179. Ozone: Adding logging support. Contributed by Anu Engineer.

Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 2e517a64d -> 3f708bf71


HDFS-10179. Ozone: Adding logging support. Contributed by Anu Engineer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3f708bf7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3f708bf7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3f708bf7

Branch: refs/heads/HDFS-7240
Commit: 3f708bf7116236e364201feb63f2142f90de6793
Parents: 2e517a6
Author: Chris Nauroth <cn...@apache.org>
Authored: Sat Mar 19 13:59:00 2016 -0700
Committer: Chris Nauroth <cn...@apache.org>
Committed: Sat Mar 19 13:59:00 2016 -0700

----------------------------------------------------------------------
 .../hadoop-hdfs/src/main/bin/hdfs               |  4 ++
 .../hadoop/hdfs/server/datanode/DataNode.java   |  3 +-
 .../web/exceptions/OzoneExceptionMapper.java    |  8 +++
 .../ozone/web/handlers/BucketHandler.java       |  6 +++
 .../web/handlers/BucketProcessTemplate.java     | 47 +++++++++++++----
 .../ozone/web/handlers/KeyProcessTemplate.java  | 53 +++++++++++++-------
 .../ozone/web/handlers/VolumeHandler.java       | 12 +++++
 .../web/handlers/VolumeProcessTemplate.java     | 47 +++++++++++------
 .../hadoop/ozone/web/utils/OzoneConsts.java     |  7 +++
 .../src/test/resources/log4j.properties         | 24 +++++++++
 10 files changed, 168 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index a6644d1..bc0c156 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -50,6 +50,7 @@ function hadoop_usage
   hadoop_add_subcommand "oev" "apply the offline edits viewer to an edits file"
   hadoop_add_subcommand "oiv" "apply the offline fsimage viewer to an fsimage"
   hadoop_add_subcommand "oiv_legacy" "apply the offline fsimage viewer to a legacy fsimage"
+  hadoop_add_subcommand "oz" "command line interface for ozone"
   hadoop_add_subcommand "portmap" "run a portmap service"
   hadoop_add_subcommand "secondarynamenode" "run the DFS secondary namenode"
   hadoop_add_subcommand "snapshotDiff" "diff two snapshots of a directory or diff the current directory contents with a snapshot"
@@ -223,6 +224,9 @@ case ${COMMAND} in
   oiv_legacy)
     CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
   ;;
+  oz)
+  CLASS=org.apache.hadoop.ozone.web.ozShell.Shell
+  ;;
   portmap)
     supportdaemonization="true"
     CLASS=org.apache.hadoop.portmap.Portmap

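The new "oz" case maps the subcommand to org.apache.hadoop.ozone.web.ozShell.Shell, which the launcher script starts as a Java main class. Hadoop CLI entry points conventionally implement org.apache.hadoop.util.Tool and start through ToolRunner so that generic options (-conf, -D, and so on) are parsed uniformly. The skeleton below is purely illustrative; the real Shell class is not part of this diff, and the class name here is hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    // Hypothetical skeleton of a CLI entry point launched via "hdfs oz".
    public class OzShellSketch extends Configured implements Tool {
      @Override
      public int run(String[] args) throws Exception {
        // A real shell would parse args here and dispatch to
        // volume/bucket/key operations.
        System.out.println("args: " + String.join(" ", args));
        return 0;
      }

      public static void main(String[] args) throws Exception {
        // ToolRunner strips generic options before invoking run().
        System.exit(ToolRunner.run(new Configuration(), new OzShellSketch(), args));
      }
    }
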
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 2b8ee24..cbbb9a6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -1314,13 +1314,14 @@ public class DataNode extends ReconfigurableBase
    * Initializes the object store handler.  This must be called before
    * initialization of the HTTP server.
    *
-   * @param conf configuration
+   * @param config configuration
    * @throws IOException if there is an I/O error
    */
   private void initObjectStoreHandler(Configuration config) throws IOException {
     if (config.getBoolean(DFS_OBJECTSTORE_ENABLED_KEY,
         DFS_OBJECTSTORE_ENABLED_DEFAULT)) {
       this.objectStoreHandler = new ObjectStoreHandler(conf);
+      LOG.info("ozone is enabled.");
     }
   }
 

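The only functional change above is the new info-level log line, emitted when the object store is switched on; initialization stays gated on DFS_OBJECTSTORE_ENABLED_KEY. A minimal sketch of that gating pattern, assuming a hypothetical key name (the real constant's value is defined elsewhere in HDFS and is not shown in this diff):

    import org.apache.hadoop.conf.Configuration;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ObjectStoreGateSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(ObjectStoreGateSketch.class);
      // Hypothetical key and default; stand-ins for the real constants.
      static final String ENABLED_KEY = "dfs.objectstore.enabled";
      static final boolean ENABLED_DEFAULT = false;

      public static void main(String[] args) {
        Configuration config = new Configuration();
        config.setBoolean(ENABLED_KEY, true);
        if (config.getBoolean(ENABLED_KEY, ENABLED_DEFAULT)) {
          LOG.info("ozone is enabled.");  // mirrors the log line added above
        }
      }
    }
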
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/exceptions/OzoneExceptionMapper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/exceptions/OzoneExceptionMapper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/exceptions/OzoneExceptionMapper.java
index d16a64f..5b27210 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/exceptions/OzoneExceptionMapper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/exceptions/OzoneExceptionMapper.java
@@ -22,14 +22,22 @@ package org.apache.hadoop.ozone.web.exceptions;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.ext.ExceptionMapper;
 
+import org.apache.log4j.MDC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  *  Class that represents various errors returned by the
  *  Object Layer.
  */
 public class OzoneExceptionMapper implements ExceptionMapper<OzoneException> {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(OzoneExceptionMapper.class);
 
   @Override
   public Response toResponse(OzoneException exception) {
+    LOG.info("Returning exception. ex: {}", exception.toJsonString());
+    MDC.clear();
     return Response.status((int)exception.getHttpCode())
       .entity(exception.toJsonString()).build();
   }

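OzoneExceptionMapper is a JAX-RS ExceptionMapper, so it runs whenever a resource method lets an OzoneException escape: it logs the outgoing error and clears the MDC so the per-request diagnostic context cannot leak onto the next request served by the same thread. (The mapper clears org.apache.log4j.MDC while the handlers below populate org.slf4j.MDC; with the slf4j-log4j12 binding both operate on the same underlying context.) A self-contained sketch of the same pattern for a generic exception type, hypothetical names throughout:

    import javax.ws.rs.core.Response;
    import javax.ws.rs.ext.ExceptionMapper;
    import javax.ws.rs.ext.Provider;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.MDC;

    // JAX-RS discovers mappers annotated with @Provider (they can also be
    // registered programmatically with the REST runtime).
    @Provider
    public class SketchExceptionMapper
        implements ExceptionMapper<IllegalStateException> {
      private static final Logger LOG =
          LoggerFactory.getLogger(SketchExceptionMapper.class);

      @Override
      public Response toResponse(IllegalStateException ex) {
        LOG.info("Returning exception. ex: {}", ex.getMessage());
        MDC.clear();  // request is done; drop its diagnostic context
        return Response.status(500).entity(ex.getMessage()).build();
      }
    }
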
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
index baa6142..da09353 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketHandler.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.ozone.web.interfaces.Bucket;
 import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
 import org.apache.hadoop.ozone.web.utils.OzoneConsts;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.slf4j.MDC;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Request;
@@ -36,6 +37,7 @@ import java.io.IOException;
 
 import static java.net.HttpURLConnection.HTTP_CREATED;
 import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_FUNCTION;
 
 
 /**
@@ -59,6 +61,7 @@ public class BucketHandler implements Bucket {
   public Response createBucket(String volume, String bucket, Request req,
                                UriInfo info, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "createBucket");
     return new BucketProcessTemplate() {
       @Override
       public Response doProcess(BucketArgs args)
@@ -94,6 +97,7 @@ public class BucketHandler implements Bucket {
   public Response updateBucket(String volume, String bucket, Request req,
                                UriInfo info, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "updateBucket");
     return new BucketProcessTemplate() {
       @Override
       public Response doProcess(BucketArgs args)
@@ -136,6 +140,7 @@ public class BucketHandler implements Bucket {
   public Response deleteBucket(String volume, String bucket, Request req,
                                UriInfo info, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "deleteBucket");
     return new BucketProcessTemplate() {
       @Override
       public Response doProcess(BucketArgs args)
@@ -169,6 +174,7 @@ public class BucketHandler implements Bucket {
                              final String startPage, Request req,
                              UriInfo uriInfo, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "listBucket");
     return new BucketProcessTemplate() {
       @Override
       public Response doProcess(BucketArgs args)

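Each REST entry point now tags the calling thread's MDC with the operation name before delegating to the shared template, so every log line emitted while that request is processed carries function=createBucket, function=deleteBucket, and so on, provided the layout prints %X{function}. A minimal sketch of the idea, with hypothetical names:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.MDC;

    public class MdcTagSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(MdcTagSketch.class);
      static final String FUNCTION = "function";  // same idea as OZONE_FUNCTION

      static void createBucket(String volume, String bucket) {
        MDC.put(FUNCTION, "createBucket");
        // Anything logged on this thread now carries function=createBucket.
        LOG.info("processing {}/{}", volume, bucket);
        MDC.clear();  // done by the templates/exception mapper in the real code
      }

      public static void main(String[] args) {
        createBucket("vol1", "bucket1");
      }
    }
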
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
index 7046b8f..2639e23 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/BucketProcessTemplate.java
@@ -30,6 +30,11 @@ import org.apache.hadoop.ozone.web.response.ListKeys;
 import org.apache.hadoop.ozone.web.utils.OzoneConsts;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Request;
 import javax.ws.rs.core.Response;
@@ -42,6 +47,11 @@ import java.util.LinkedList;
 import java.util.List;
 
 import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_USER;
+
 
 
 /**
@@ -49,6 +59,8 @@ import static java.net.HttpURLConnection.HTTP_OK;
  * Bucket handling code.
  */
 public abstract class BucketProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BucketProcessTemplate.class);
 
   /**
    * This function serves as the common error handling function
@@ -70,22 +82,30 @@ public abstract class BucketProcessTemplate {
     // TODO : Add logging
     String reqID = OzoneUtils.getRequestID();
     String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
+    UserArgs userArgs = null;
     try {
+      userArgs = new UserArgs(reqID, hostName, request, uriInfo, headers);
+
       OzoneUtils.validate(request, headers, reqID, bucket, hostName);
       OzoneUtils.verifyBucketName(bucket);
 
       UserAuth auth = UserHandlerBuilder.getAuthHandler();
-      UserArgs userArgs =
-          new UserArgs(reqID, hostName, request, uriInfo, headers);
       userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
 
       BucketArgs args = new BucketArgs(volume, bucket, userArgs);
-      return doProcess(args);
-    } catch (IllegalArgumentException argExp) {
-      OzoneException ex = ErrorTable
-          .newError(ErrorTable.INVALID_BUCKET_NAME, reqID, bucket, hostName);
-      ex.setMessage(argExp.getMessage());
-      throw ex;
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args);
+      LOG.info("Success");
+      MDC.clear();
+      return response;
+
+    } catch (IllegalArgumentException argEx) {
+      LOG.debug("Invalid bucket. ex:{}", argEx);
+      throw ErrorTable.newError(ErrorTable.INVALID_BUCKET_NAME, userArgs,
+          argEx);
     } catch (IOException fsExp) {
       handleIOException(bucket, reqID, hostName, fsExp);
     }
@@ -133,6 +153,7 @@ public abstract class BucketProcessTemplate {
    */
   void handleIOException(String bucket, String reqID, String hostName,
                          IOException fsExp) throws OzoneException {
+    LOG.debug("IOException: {}", fsExp);
 
     if (fsExp instanceof FileAlreadyExistsException) {
       throw ErrorTable
@@ -224,6 +245,7 @@ public abstract class BucketProcessTemplate {
     try {
       return OzoneConsts.Versioning.valueOf(version);
     } catch (IllegalArgumentException ex) {
+      LOG.debug("Malformed Version. version: {}", version);
       throw ErrorTable.newError(ErrorTable.MALFORMED_BUCKET_VERSION, args, ex);
     }
   }
@@ -239,10 +261,11 @@ public abstract class BucketProcessTemplate {
    * @throws OzoneException
    */
   StorageType getStorageType(BucketArgs args) throws OzoneException {
-
+    List<String> storageClassString = null;
     try {
-      List<String> storageClassString =
+      storageClassString =
           args.getHeaders().getRequestHeader(Header.OZONE_STORAGE_TYPE);
+
       if (storageClassString == null) {
         return null;
       }
@@ -254,6 +277,10 @@ public abstract class BucketProcessTemplate {
       }
       return StorageType.valueOf(storageClassString.get(0).toUpperCase());
     } catch (IllegalArgumentException ex) {
+      if(storageClassString != null) {
+        LOG.debug("Malformed storage type. Type: {}",
+            storageClassString.get(0).toUpperCase());
+      }
       throw ErrorTable.newError(ErrorTable.MALFORMED_STORAGE_TYPE, args, ex);
     }
   }

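The reworked template method above is the heart of the change: it seeds the MDC with the component and request id, constructs UserArgs up front so the error paths can pass it to ErrorTable.newError, enriches the MDC with the user and resource once they are known, and clears the MDC itself only on the success path; on the error path the thrown OzoneException reaches OzoneExceptionMapper, which clears the MDC there. A condensed sketch of that lifecycle, with hypothetical types standing in for UserArgs and ErrorTable:

    import java.util.UUID;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.MDC;

    public abstract class ProcessTemplateSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(ProcessTemplateSketch.class);

      public String handleCall(String resource, String user) {
        String reqID = UUID.randomUUID().toString();  // stand-in for getRequestID()
        MDC.put("component", "ozone");
        MDC.put("request", reqID);
        try {
          MDC.put("user", user);
          MDC.put("resource", resource);
          String response = doProcess(resource);
          LOG.info("Success");
          MDC.clear();  // success path cleans up its own context
          return response;
        } catch (IllegalArgumentException ex) {
          LOG.debug("Invalid resource name", ex);
          // In the real code this becomes an OzoneException and the
          // ExceptionMapper clears the MDC.
          throw ex;
        }
      }

      protected abstract String doProcess(String resource);

      public static void main(String[] args) {
        ProcessTemplateSketch t = new ProcessTemplateSketch() {
          @Override
          protected String doProcess(String resource) {
            return "OK " + resource;
          }
        };
        System.out.println(t.handleCall("/vol1/bucket1", "hdfs"));
      }
    }
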
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
index 7607434..88e9052 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/KeyProcessTemplate.java
@@ -25,6 +25,9 @@ import org.apache.hadoop.ozone.web.headers.Header;
 import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
 import org.apache.hadoop.ozone.web.interfaces.UserAuth;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Request;
@@ -41,11 +44,17 @@ import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.INVALID_BUCKET_N
 import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.INVALID_REQUEST;
 import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.SERVER_ERROR;
 import static org.apache.hadoop.ozone.web.exceptions.ErrorTable.newError;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_USER;
 
 /**
  * This class abstracts away the repetitive tasks in Key handling code.
  */
 public abstract class KeyProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KeyProcessTemplate.class);
 
   /**
    * This function serves as the common error handling function for all Key
@@ -63,31 +72,37 @@ public abstract class KeyProcessTemplate {
 
     String reqID = OzoneUtils.getRequestID();
     String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
     UserArgs userArgs = null;
     try {
+      userArgs = new UserArgs(reqID, hostName, request, info, headers);
       OzoneUtils.validate(request, headers, reqID, bucket, hostName);
       OzoneUtils.verifyBucketName(bucket);
 
       UserAuth auth = UserHandlerBuilder.getAuthHandler();
-      userArgs = new UserArgs(reqID, hostName, request, info, headers);
       userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
 
       KeyArgs args = new KeyArgs(volume, bucket, key, userArgs);
-      return doProcess(args, is, request, headers, info);
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args, is, request, headers, info);
+      LOG.info("Success");
+      MDC.clear();
+      return response;
+
     } catch (IllegalArgumentException argExp) {
-      OzoneException ex =
-          newError(INVALID_BUCKET_NAME, reqID, bucket, hostName);
-      ex.setMessage(argExp.getMessage());
-      throw ex;
+      LOG.debug("Invalid bucket in key call. ex:{}", argExp);
+      throw newError(INVALID_BUCKET_NAME, userArgs, argExp);
     } catch (IOException fsExp) {
       // TODO : Handle errors from the FileSystem, let us map to server error
       // for now.
+      LOG.debug("IOException. ex : {}", fsExp);
       throw ErrorTable.newError(ErrorTable.SERVER_ERROR, userArgs, fsExp);
     } catch (NoSuchAlgorithmException algoEx) {
-      OzoneException ex =
-          ErrorTable.newError(SERVER_ERROR, reqID, key, hostName);
-      ex.setMessage(algoEx.getMessage());
-      throw ex;
+      LOG.debug("NoSuchAlgorithmException. Probably indicates an unusual java "
+          + "installation.  ex : {}", algoEx);
+      throw ErrorTable.newError(SERVER_ERROR, userArgs, algoEx);
     }
   }
 
@@ -131,10 +146,11 @@ public abstract class KeyProcessTemplate {
 
       if (!contentString.equals(computedString)) {
         fs.deleteKey(args);
-        OzoneException ex = ErrorTable.newError(BAD_DIGEST, args.getRequestID(),
-            args.getKeyName(), args.getHostName());
-        ex.setMessage(String.format("MD5 Digest mismatch. Expected %s Found " +
-            "%s", contentString, computedString));
+        OzoneException ex = ErrorTable.newError(BAD_DIGEST, args);
+        String msg = String.format("MD5 Digest mismatch. Expected %s Found " +
+            "%s", contentString, computedString);
+        ex.setMessage(msg);
+        LOG.debug(msg);
         throw ex;
       }
     }
@@ -158,10 +174,11 @@ public abstract class KeyProcessTemplate {
       throws IOException, OzoneException {
     if (bytesRead != contentLen) {
       fs.deleteKey(args);
-      OzoneException ex = ErrorTable.newError(INCOMPLETE_BODY,
-          args.getRequestID(), args.getKeyName(), args.getHostName());
-      ex.setMessage(String.format("Body length mismatch. Expected length : %d" +
-          " Found %d", contentLen, bytesRead));
+      OzoneException ex = ErrorTable.newError(INCOMPLETE_BODY, args);
+      String msg = String.format("Body length mismatch. Expected length : %d" +
+          " Found %d", contentLen, bytesRead);
+      ex.setMessage(msg);
+      LOG.debug(msg);
       throw ex;
     }
   }

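Both verification helpers above now log the exact mismatch message at debug level before throwing. The digest check itself is a plain MD5 comparison: the service hashes the bytes it actually stored and compares against the digest the client claimed (Content-MD5 is conventionally base64-encoded). Every conforming JRE ships an MD5 MessageDigest, which is why the NoSuchAlgorithmException branch is logged as a probable sign of an unusual Java installation. A self-contained sketch of the check, assumptions noted in the comments:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;

    public class DigestCheckSketch {
      public static void main(String[] args) throws NoSuchAlgorithmException {
        byte[] body = "hello ozone".getBytes(StandardCharsets.UTF_8);
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        String computed = Base64.getEncoder().encodeToString(md5.digest(body));
        // Pretend the client sent this header value; a mismatch would be the
        // BAD_DIGEST case in the template above.
        String claimed = computed;
        if (!claimed.equals(computed)) {
          throw new IllegalStateException(String.format(
              "MD5 Digest mismatch. Expected %s Found %s", claimed, computed));
        }
        System.out.println("digest ok: " + computed);
      }
    }
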
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
index 09a021b..2ce39ba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeHandler.java
@@ -26,6 +26,9 @@ import org.apache.hadoop.ozone.web.interfaces.StorageHandler;
 import org.apache.hadoop.ozone.web.interfaces.UserAuth;
 import org.apache.hadoop.ozone.web.interfaces.Volume;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Request;
@@ -35,6 +38,7 @@ import java.io.IOException;
 
 import static java.net.HttpURLConnection.HTTP_CREATED;
 import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_FUNCTION;
 
 /**
  * VolumeHandler handles volume specific HTTP calls.
@@ -50,6 +54,8 @@ import static java.net.HttpURLConnection.HTTP_OK;
  */
 @InterfaceAudience.Private
 public class VolumeHandler implements Volume {
+  private static final Logger LOG = LoggerFactory.getLogger(VolumeHandler
+      .class);
   /**
    * Creates a volume.
    *
@@ -67,6 +73,7 @@ public class VolumeHandler implements Volume {
   public Response createVolume(String volume, final String quota, Request req,
                                UriInfo uriInfo, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "createVolume");
     return new VolumeProcessTemplate() {
       @Override
       public Response doProcess(VolumeArgs args)
@@ -119,6 +126,7 @@ public class VolumeHandler implements Volume {
   public Response updateVolume(String volume, final String quota, Request req,
                                UriInfo uriInfo, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "updateVolume");
     return new VolumeProcessTemplate() {
       @Override
       public Response doProcess(VolumeArgs args)
@@ -171,6 +179,8 @@ public class VolumeHandler implements Volume {
   @Override
   public Response deleteVolume(String volume, Request req, UriInfo uriInfo,
                                HttpHeaders headers) throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "deleteVolume");
+
     return new VolumeProcessTemplate() {
       @Override
       public Response doProcess(VolumeArgs args)
@@ -202,6 +212,7 @@ public class VolumeHandler implements Volume {
   public Response getVolumeInfo(String volume, final String info, Request req,
                                 final UriInfo uriInfo, HttpHeaders headers)
       throws OzoneException {
+    MDC.put(OZONE_FUNCTION, "getVolumeInfo");
     return new VolumeProcessTemplate() {
       @Override
       public Response doProcess(VolumeArgs args)
@@ -215,6 +226,7 @@ public class VolumeHandler implements Volume {
           case Header.OZONE_LIST_QUERY_SERVICE:
             return getVolumesByUser(args); // Return list of volumes
           default:
+            LOG.debug("Unrecognized query param : {} ", info);
             OzoneException ozoneException =
                 ErrorTable.newError(ErrorTable.INVALID_QUERY_PARAM, args);
             ozoneException.setMessage("Unrecognized query param : " + info);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
index d357da7..7ca5d47 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/handlers/VolumeProcessTemplate.java
@@ -27,6 +27,9 @@ import org.apache.hadoop.ozone.web.response.ListBuckets;
 import org.apache.hadoop.ozone.web.response.ListVolumes;
 import org.apache.hadoop.ozone.web.response.VolumeInfo;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.Request;
@@ -38,6 +41,11 @@ import java.nio.file.FileAlreadyExistsException;
 import java.nio.file.NoSuchFileException;
 
 import static java.net.HttpURLConnection.HTTP_OK;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_COMPONENT;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_RESOURCE;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_REQUEST;
+import static org.apache.hadoop.ozone.web.utils.OzoneConsts.OZONE_USER;
+
 
 /**
  * This class abstracts away the repetitive tasks in
@@ -45,6 +53,8 @@ import static java.net.HttpURLConnection.HTTP_OK;
  */
 @InterfaceAudience.Private
 public abstract class VolumeProcessTemplate {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(VolumeProcessTemplate.class);
 
 
   /**
@@ -64,24 +74,30 @@ public abstract class VolumeProcessTemplate {
                              HttpHeaders headers) throws OzoneException {
     String reqID = OzoneUtils.getRequestID();
     String hostName = OzoneUtils.getHostName();
+    MDC.put(OZONE_COMPONENT, "ozone");
+    MDC.put(OZONE_REQUEST, reqID);
+    UserArgs userArgs  = null;
     try {
-
+      userArgs = new UserArgs(reqID, hostName, request, info, headers);
       OzoneUtils.validate(request, headers, reqID, volume, hostName);
 
       // we use the same logic for both bucket and volume names
       OzoneUtils.verifyBucketName(volume);
       UserAuth auth = UserHandlerBuilder.getAuthHandler();
-      UserArgs userArgs = new UserArgs(reqID, hostName, request, info, headers);
 
       userArgs.setUserName(auth.getUser(userArgs));
+      MDC.put(OZONE_USER, userArgs.getUserName());
       VolumeArgs args = new VolumeArgs(volume, userArgs);
 
-      return doProcess(args);
+      MDC.put(OZONE_RESOURCE, args.getResourceName());
+      Response response =  doProcess(args);
+      LOG.info("Success");
+      MDC.clear();
+      return response;
+
     } catch (IllegalArgumentException ex) {
-      OzoneException exp = ErrorTable
-          .newError(ErrorTable.INVALID_VOLUME_NAME, reqID, volume, hostName);
-      exp.setMessage(ex.getMessage());
-      throw exp;
+      LOG.debug("illegal argument. {}", ex);
+      throw ErrorTable.newError(ErrorTable.INVALID_VOLUME_NAME, userArgs, ex);
     } catch (IOException ex) {
       handleIOException(volume, reqID, hostName, ex);
     }
@@ -142,6 +158,7 @@ public abstract class VolumeProcessTemplate {
         exp.setMessage(fsExp.getMessage());
       }
     }
+    LOG.debug("IOException: {}", exp);
     throw exp;
   }
 
@@ -158,6 +175,7 @@ public abstract class VolumeProcessTemplate {
     try {
       args.setQuota(quota);
     } catch (IllegalArgumentException ex) {
+      LOG.debug("Malformed Quota: {}", ex);
       throw ErrorTable.newError(ErrorTable.MALFORMED_QUOTA, args, ex);
     }
   }
@@ -227,7 +245,9 @@ public abstract class VolumeProcessTemplate {
                        args.getRequest(), args.getUri(), args.getHeaders());
       return getVolumesByUser(user);
     } catch (IOException ex) {
-      OzoneException exp = ErrorTable.newError(ErrorTable.SERVER_ERROR, args);
+      LOG.debug("unable to get the volume list for the user. Ex: {}", ex);
+      OzoneException exp = ErrorTable.newError(ErrorTable.SERVER_ERROR,
+          args, ex);
       exp.setMessage("unable to get the volume list for the user");
       throw exp;
     }
@@ -242,20 +262,19 @@ public abstract class VolumeProcessTemplate {
    * @throws OzoneException
    */
   Response getBucketsInVolume(VolumeArgs args) throws OzoneException {
-    String requestID = OzoneUtils.getRequestID();
-    String hostName = OzoneUtils.getHostName();
     try {
-      UserAuth auth = UserHandlerBuilder.getAuthHandler();
-      // TODO : Check for ACLS access.
+      // UserAuth auth = UserHandlerBuilder.getAuthHandler();
+      // TODO : Check ACLS.
       StorageHandler fs = StorageHandlerBuilder.getStorageHandler();
       ListBuckets bucketList = fs.listBuckets(args);
       return OzoneUtils.getResponse(args, HTTP_OK, bucketList.toJsonString());
     } catch (IOException ex) {
+      LOG.debug("unable to get the bucket list for the specified volume." +
+          " Ex: {}", ex);
       OzoneException exp =
-          ErrorTable.newError(ErrorTable.SERVER_ERROR, requestID, "", hostName);
+          ErrorTable.newError(ErrorTable.SERVER_ERROR, args, ex);
       exp.setMessage("unable to get the bucket list for the specified volume.");
       throw exp;
-
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneConsts.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneConsts.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneConsts.java
index fb0a7a6..80f02d6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneConsts.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneConsts.java
@@ -47,6 +47,13 @@ public final class OzoneConsts {
       "EEE, dd MMM yyyy HH:mm:ss zzz";
   public static final String OZONE_TIME_ZONE = "GMT";
 
+  public static final String OZONE_COMPONENT = "component";
+  public static final String OZONE_FUNCTION  = "function";
+  public static final String OZONE_RESOURCE = "resource";
+  public static final String OZONE_USER = "user";
+  public static final String OZONE_REQUEST = "request";
+
+
   /**
    * Supports Bucket Versioning.
    */

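These five constants are the MDC keys that the log4j patterns added in the next file render through %X{...}: each key becomes a column in the log line. A self-contained sketch wiring the same pattern up programmatically against the log4j 1.x API (class name and sample values are illustrative):

    import org.apache.log4j.ConsoleAppender;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.MDC;
    import org.apache.log4j.PatternLayout;

    public class MdcPatternSketch {
      public static void main(String[] args) {
        String pattern = "%d{ISO8601} [%t] %-5p %X{component} %X{function} "
            + "%X{resource} %X{user} %X{request} - %m%n";
        Logger log = Logger.getLogger("ozone.sketch");
        log.addAppender(new ConsoleAppender(new PatternLayout(pattern)));
        log.setLevel(Level.INFO);
        MDC.put("component", "ozone");
        MDC.put("function", "createVolume");
        MDC.put("resource", "/vol1");
        MDC.put("user", "hdfs");
        MDC.put("request", "req-123");
        log.info("Success");  // prints with all five MDC columns filled in
      }
    }
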
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f708bf7/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
index 7378846..4d6967a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/log4j.properties
@@ -47,3 +47,27 @@ log4j.appender.DNMETRICSRFA.layout=org.apache.log4j.PatternLayout
 log4j.appender.DNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n
 log4j.appender.DNMETRICSRFA.MaxBackupIndex=1
 log4j.appender.DNMETRICSRFA.MaxFileSize=64MB
+
+#
+# Add a logger for ozone that is separate from the Datanode.
+#
+log4j.logger.org.apache.hadoop.ozone=DEBUG,OZONE,FILE
+
+# Do not log into datanode logs. Remove this line to have single log.
+log4j.additivity.org.apache.hadoop.ozone=false
+
+# For development purposes, log both to console and log file.
+log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
+log4j.appender.OZONE.Threshold=info
+log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
+log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+ %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
+
+# Real ozone logger that writes to ozone.log
+log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
+log4j.appender.FILE.Threshold=debug
+log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+  (%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
+  %m%n