You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by en...@apache.org on 2016/06/06 22:32:17 UTC
[1/5] hbase git commit: HBASE-15954 REST server should log requests
with TRACE instead of DEBUG
Repository: hbase
Updated Branches:
refs/heads/branch-1 878b1ea72 -> 4a0a9a20d
refs/heads/branch-1.1 73a746239 -> 218259c0e
refs/heads/branch-1.2 edbf2754a -> 70593efa2
refs/heads/branch-1.3 aa636bef4 -> 466eb3164
refs/heads/master b21c56e79 -> 3d7840a17
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3d7840a1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3d7840a1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3d7840a1
Branch: refs/heads/master
Commit: 3d7840a173aab97fb72409fa8c0f161fd7ad0e8f
Parents: b21c56e
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 10:58:37 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/NamespacesInstanceResource.java | 24 ++++-----
.../hadoop/hbase/rest/NamespacesResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 ++--
.../apache/hadoop/hbase/rest/RESTServer.java | 12 +++--
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++++++++++----------
.../hbase/rest/ScannerInstanceResource.java | 32 +++++++-----
.../hadoop/hbase/rest/ScannerResource.java | 17 +++---
.../hadoop/hbase/rest/SchemaResource.java | 22 ++++----
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 ++++++----
.../hadoop/hbase/rest/VersionResource.java | 10 ++--
.../apache/hadoop/hbase/rest/client/Client.java | 44 ++++++++--------
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../rest/filter/RestCsrfPreventionFilter.java | 15 +++---
.../consumer/ProtobufMessageBodyConsumer.java | 6 +--
.../hadoop/hbase/util/ConnectionCache.java | 6 ++-
21 files changed, 169 insertions(+), 142 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 8f64738..c832905 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -91,8 +91,8 @@ public class NamespacesInstanceResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
@@ -135,8 +135,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response put(final NamespacesInstanceModel model, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, true, uriInfo);
@@ -151,8 +151,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@PUT
public Response putNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -176,8 +176,8 @@ public class NamespacesInstanceResource extends ResourceBase {
public Response post(final NamespacesInstanceModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, false, uriInfo);
@@ -192,8 +192,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@POST
public Response postNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -287,8 +287,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@DELETE
public Response deleteNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
if (servlet.isReadOnly()) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 0548fe8..1304fe0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -64,8 +64,8 @@ public class NamespacesResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index b26de54..cb37fb5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -168,20 +168,26 @@ public class RESTServer implements Constants {
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
- LOG.debug("port set to " + val);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
- LOG.debug("Web UI port set to " + val);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 4da5c67..411ced8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -30,14 +32,13 @@ import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.hbase.util.JvmPauseMonitor;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 48721bb..f803b26 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c08bb8b..fc4c548 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index 15828ce..de84625 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 844ea3b..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,7 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -102,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -129,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -142,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index c0e7153..dc34f09 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index 45e9125..70a4538 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index 142c276..e26de63 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -396,7 +396,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -433,7 +433,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -448,7 +448,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -498,7 +498,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -515,7 +515,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -535,7 +535,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -550,7 +550,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 30eea95..dbb1447 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -34,13 +34,12 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* This filter provides protection against cross site request forgery (CSRF)
* attacks for REST APIs. Enabling this filter on an endpoint results in the
@@ -52,8 +51,8 @@ import org.slf4j.LoggerFactory;
@InterfaceStability.Evolving
public class RestCsrfPreventionFilter implements Filter {
- private static final Logger LOG =
- LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
@@ -87,9 +86,9 @@ public class RestCsrfPreventionFilter implements Filter {
agents = BROWSER_USER_AGENTS_DEFAULT;
}
parseBrowserUserAgents(agents);
- LOG.info("Adding cross-site request forgery (CSRF) protection, "
- + "headerName = {}, methodsToIgnore = {}, browserUserAgents = {}",
- headerName, methodsToIgnore, browserUserAgents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
}
void parseBrowserUserAgents(String userAgents) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/3d7840a1/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index e8a32d9..a860f20 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}
[5/5] hbase git commit: HBASE-15954 REST server should log requests
with TRACE instead of DEBUG
Posted by en...@apache.org.
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70593efa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70593efa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70593efa
Branch: refs/heads/branch-1.2
Commit: 70593efa2760a4c0f5df353047200e5ed14c1035
Parents: edbf275
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 14:07:16 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/NamespacesInstanceResource.java | 24 ++++-----
.../hadoop/hbase/rest/NamespacesResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 ++--
.../apache/hadoop/hbase/rest/RESTServer.java | 18 ++++---
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++++++++++----------
.../hbase/rest/ScannerInstanceResource.java | 32 +++++++-----
.../hadoop/hbase/rest/ScannerResource.java | 18 +++----
.../hadoop/hbase/rest/SchemaResource.java | 22 ++++----
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 ++++++----
.../hadoop/hbase/rest/VersionResource.java | 10 ++--
.../apache/hadoop/hbase/rest/client/Client.java | 40 +++++++--------
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../consumer/ProtobufMessageBodyConsumer.java | 6 +--
.../hadoop/hbase/util/ConnectionCache.java | 6 ++-
20 files changed, 162 insertions(+), 137 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 8f64738..c832905 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -91,8 +91,8 @@ public class NamespacesInstanceResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
@@ -135,8 +135,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response put(final NamespacesInstanceModel model, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, true, uriInfo);
@@ -151,8 +151,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@PUT
public Response putNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -176,8 +176,8 @@ public class NamespacesInstanceResource extends ResourceBase {
public Response post(final NamespacesInstanceModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, false, uriInfo);
@@ -192,8 +192,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@POST
public Response postNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -287,8 +287,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@DELETE
public Response deleteNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
if (servlet.isReadOnly()) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 0548fe8..1304fe0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -64,8 +64,8 @@ public class NamespacesResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index ad8c65d..7ccc6c1 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -127,23 +127,27 @@ public class RESTServer implements Constants {
// check for user-defined port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 1f751a6..e49298b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -29,14 +31,13 @@ import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics = new MetricsREST();
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c08bb8b..fc4c548 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index ebedf57..f511e03 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -382,7 +382,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -398,7 +398,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -449,7 +449,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -465,7 +465,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -480,7 +480,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/70593efa/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 89e44bd..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}
[3/5] hbase git commit: HBASE-15954 REST server should log requests
with TRACE instead of DEBUG
Posted by en...@apache.org.
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/466eb316
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/466eb316
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/466eb316
Branch: refs/heads/branch-1.3
Commit: 466eb31648a4783c79d9b044fdd84d0db25c3d12
Parents: aa636be
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 11:34:33 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/NamespacesInstanceResource.java | 24 +-
.../hadoop/hbase/rest/NamespacesResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 +-
.../apache/hadoop/hbase/rest/RESTServer.java | 18 +-
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++--
.../hbase/rest/ScannerInstanceResource.java | 32 ++-
.../hadoop/hbase/rest/ScannerResource.java | 18 +-
.../hadoop/hbase/rest/SchemaResource.java | 22 +-
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 +-
.../hadoop/hbase/rest/VersionResource.java | 10 +-
.../apache/hadoop/hbase/rest/client/Client.java | 40 +--
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../rest/filter/RestCsrfPreventionFilter.java | 286 +++++++++++++++++++
.../consumer/ProtobufMessageBodyConsumer.java | 6 +-
.../hadoop/hbase/util/ConnectionCache.java | 6 +-
21 files changed, 448 insertions(+), 137 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 8f64738..c832905 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -91,8 +91,8 @@ public class NamespacesInstanceResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
@@ -135,8 +135,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response put(final NamespacesInstanceModel model, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, true, uriInfo);
@@ -151,8 +151,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@PUT
public Response putNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -176,8 +176,8 @@ public class NamespacesInstanceResource extends ResourceBase {
public Response post(final NamespacesInstanceModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, false, uriInfo);
@@ -192,8 +192,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@POST
public Response postNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -287,8 +287,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@DELETE
public Response deleteNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
if (servlet.isReadOnly()) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 0548fe8..1304fe0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -64,8 +64,8 @@ public class NamespacesResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index ad8c65d..7ccc6c1 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -127,23 +127,27 @@ public class RESTServer implements Constants {
// check for user-defined port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 1f751a6..e49298b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -29,14 +31,13 @@ import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics = new MetricsREST();
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c08bb8b..fc4c548 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index ebedf57..f511e03 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -382,7 +382,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -398,7 +398,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -449,7 +449,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -465,7 +465,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -480,7 +480,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
new file mode 100644
index 0000000..dbb1447
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This filter provides protection against cross site request forgery (CSRF)
+ * attacks for REST APIs. Enabling this filter on an endpoint results in the
+ * requirement that all clients send a particular (configurable) HTTP header
+ * with every request. In the absence of this header the filter will reject the
+ * attempt as a bad request.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RestCsrfPreventionFilter implements Filter {
+
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
+
+ public static final String HEADER_USER_AGENT = "User-Agent";
+ public static final String BROWSER_USER_AGENT_PARAM =
+ "browser-useragents-regex";
+ public static final String CUSTOM_HEADER_PARAM = "custom-header";
+ public static final String CUSTOM_METHODS_TO_IGNORE_PARAM =
+ "methods-to-ignore";
+ static final String BROWSER_USER_AGENTS_DEFAULT = "^Mozilla.*,^Opera.*";
+ public static final String HEADER_DEFAULT = "X-XSRF-HEADER";
+ static final String METHODS_TO_IGNORE_DEFAULT = "GET,OPTIONS,HEAD,TRACE";
+ private String headerName = HEADER_DEFAULT;
+ private Set<String> methodsToIgnore = null;
+ private Set<Pattern> browserUserAgents;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
+ if (customHeader != null) {
+ headerName = customHeader;
+ }
+ String customMethodsToIgnore =
+ filterConfig.getInitParameter(CUSTOM_METHODS_TO_IGNORE_PARAM);
+ if (customMethodsToIgnore != null) {
+ parseMethodsToIgnore(customMethodsToIgnore);
+ } else {
+ parseMethodsToIgnore(METHODS_TO_IGNORE_DEFAULT);
+ }
+
+ String agents = filterConfig.getInitParameter(BROWSER_USER_AGENT_PARAM);
+ if (agents == null) {
+ agents = BROWSER_USER_AGENTS_DEFAULT;
+ }
+ parseBrowserUserAgents(agents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
+ }
+
+ void parseBrowserUserAgents(String userAgents) {
+ String[] agentsArray = userAgents.split(",");
+ browserUserAgents = new HashSet<Pattern>();
+ for (String patternString : agentsArray) {
+ browserUserAgents.add(Pattern.compile(patternString));
+ }
+ }
+
+ void parseMethodsToIgnore(String mti) {
+ String[] methods = mti.split(",");
+ methodsToIgnore = new HashSet<String>();
+ for (int i = 0; i < methods.length; i++) {
+ methodsToIgnore.add(methods[i]);
+ }
+ }
+
+ /**
+ * This method interrogates the User-Agent String and returns whether it
+ * refers to a browser. If it is not a browser, then the requirement for the
+ * CSRF header will not be enforced; if it is a browser, the requirement will
+ * be enforced.
+ * <p>
+ * A User-Agent String is considered to be a browser if it matches
+ * any of the regex patterns from browser-useragent-regex; the default
+ * behavior is to consider everything a browser that matches the following:
+ * "^Mozilla.*,^Opera.*". Subclasses can optionally override
+ * this method to use different behavior.
+ *
+ * @param userAgent The User-Agent String, or null if there isn't one
+ * @return true if the User-Agent String refers to a browser, false if not
+ */
+ protected boolean isBrowser(String userAgent) {
+ if (userAgent == null) {
+ return false;
+ }
+ for (Pattern pattern : browserUserAgents) {
+ Matcher matcher = pattern.matcher(userAgent);
+ if (matcher.matches()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Defines the minimal API requirements for the filter to execute its
+ * filtering logic. This interface exists to facilitate integration in
+ * components that do not run within a servlet container and therefore cannot
+ * rely on a servlet container to dispatch to the {@link #doFilter} method.
+ * Applications that do run inside a servlet container will not need to write
+ * code that uses this interface. Instead, they can use typical servlet
+ * container configuration mechanisms to insert the filter.
+ */
+ public interface HttpInteraction {
+
+ /**
+ * Returns the value of a header.
+ *
+ * @param header name of header
+ * @return value of header
+ */
+ String getHeader(String header);
+
+ /**
+ * Returns the method.
+ *
+ * @return method
+ */
+ String getMethod();
+
+ /**
+ * Called by the filter after it decides that the request may proceed.
+ *
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ void proceed() throws IOException, ServletException;
+
+ /**
+ * Called by the filter after it decides that the request is a potential
+ * CSRF attack and therefore must be rejected.
+ *
+ * @param code status code to send
+ * @param message response message
+ * @throws IOException if there is an I/O error
+ */
+ void sendError(int code, String message) throws IOException;
+ }
+
+ /**
+ * Handles an {@link HttpInteraction} by applying the filtering logic.
+ *
+ * @param httpInteraction caller's HTTP interaction
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ public void handleHttpInteraction(HttpInteraction httpInteraction)
+ throws IOException, ServletException {
+ if (!isBrowser(httpInteraction.getHeader(HEADER_USER_AGENT)) ||
+ methodsToIgnore.contains(httpInteraction.getMethod()) ||
+ httpInteraction.getHeader(headerName) != null) {
+ httpInteraction.proceed();
+ } else {
+ httpInteraction.sendError(HttpServletResponse.SC_BAD_REQUEST,
+ "Missing Required Header for CSRF Vulnerability Protection");
+ }
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response,
+ final FilterChain chain) throws IOException, ServletException {
+ final HttpServletRequest httpRequest = (HttpServletRequest)request;
+ final HttpServletResponse httpResponse = (HttpServletResponse)response;
+ handleHttpInteraction(new ServletFilterHttpInteraction(httpRequest,
+ httpResponse, chain));
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ /**
+ * Constructs a mapping of configuration properties to be used for filter
+ * initialization. The mapping includes all properties that start with the
+ * specified configuration prefix. Property names in the mapping are trimmed
+ * to remove the configuration prefix.
+ *
+ * @param conf configuration to read
+ * @param confPrefix configuration prefix
+ * @return mapping of configuration properties to be used for filter
+ * initialization
+ */
+ public static Map<String, String> getFilterParams(Configuration conf,
+ String confPrefix) {
+ Map<String, String> filterConfigMap = new HashMap<>();
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(confPrefix)) {
+ String value = conf.get(name);
+ name = name.substring(confPrefix.length());
+ filterConfigMap.put(name, value);
+ }
+ }
+ return filterConfigMap;
+ }
+
+ /**
+ * {@link HttpInteraction} implementation for use in the servlet filter.
+ */
+ private static final class ServletFilterHttpInteraction
+ implements HttpInteraction {
+
+ private final FilterChain chain;
+ private final HttpServletRequest httpRequest;
+ private final HttpServletResponse httpResponse;
+
+ /**
+ * Creates a new ServletFilterHttpInteraction.
+ *
+ * @param httpRequest request to process
+ * @param httpResponse response to process
+ * @param chain filter chain to forward to if HTTP interaction is allowed
+ */
+ public ServletFilterHttpInteraction(HttpServletRequest httpRequest,
+ HttpServletResponse httpResponse, FilterChain chain) {
+ this.httpRequest = httpRequest;
+ this.httpResponse = httpResponse;
+ this.chain = chain;
+ }
+
+ @Override
+ public String getHeader(String header) {
+ return httpRequest.getHeader(header);
+ }
+
+ @Override
+ public String getMethod() {
+ return httpRequest.getMethod();
+ }
+
+ @Override
+ public void proceed() throws IOException, ServletException {
+ chain.doFilter(httpRequest, httpResponse);
+ }
+
+ @Override
+ public void sendError(int code, String message) throws IOException {
+ httpResponse.sendError(code, message);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/466eb316/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 89e44bd..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}
[2/5] hbase git commit: HBASE-15954 REST server should log requests
with TRACE instead of DEBUG
Posted by en...@apache.org.
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4a0a9a20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4a0a9a20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4a0a9a20
Branch: refs/heads/branch-1
Commit: 4a0a9a20dd5bbfdafe2ec95196b449d2e1a45a13
Parents: 878b1ea
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 11:06:52 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/NamespacesInstanceResource.java | 24 ++++-----
.../hadoop/hbase/rest/NamespacesResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 ++--
.../apache/hadoop/hbase/rest/RESTServer.java | 18 ++++---
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++++++++++----------
.../hbase/rest/ScannerInstanceResource.java | 32 +++++++-----
.../hadoop/hbase/rest/ScannerResource.java | 18 +++----
.../hadoop/hbase/rest/SchemaResource.java | 22 ++++----
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 ++++++----
.../hadoop/hbase/rest/VersionResource.java | 10 ++--
.../apache/hadoop/hbase/rest/client/Client.java | 44 ++++++++--------
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../rest/filter/RestCsrfPreventionFilter.java | 15 +++---
.../consumer/ProtobufMessageBodyConsumer.java | 6 +--
.../hadoop/hbase/util/ConnectionCache.java | 6 ++-
21 files changed, 171 insertions(+), 147 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 8f64738..c832905 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -91,8 +91,8 @@ public class NamespacesInstanceResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
@@ -135,8 +135,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response put(final NamespacesInstanceModel model, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, true, uriInfo);
@@ -151,8 +151,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@PUT
public Response putNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -176,8 +176,8 @@ public class NamespacesInstanceResource extends ResourceBase {
public Response post(final NamespacesInstanceModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, false, uriInfo);
@@ -192,8 +192,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@POST
public Response postNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -287,8 +287,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@DELETE
public Response deleteNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
if (servlet.isReadOnly()) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 0548fe8..1304fe0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -64,8 +64,8 @@ public class NamespacesResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 9dac84a..cb37fb5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -167,23 +167,27 @@ public class RESTServer implements Constants {
// check for user-defined port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 4da5c67..411ced8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -30,14 +32,13 @@ import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.hbase.util.JvmPauseMonitor;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics;
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c08bb8b..fc4c548 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index 142c276..e26de63 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -396,7 +396,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -433,7 +433,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -448,7 +448,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -498,7 +498,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -515,7 +515,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -535,7 +535,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -550,7 +550,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 30eea95..dbb1447 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -34,13 +34,12 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* This filter provides protection against cross site request forgery (CSRF)
* attacks for REST APIs. Enabling this filter on an endpoint results in the
@@ -52,8 +51,8 @@ import org.slf4j.LoggerFactory;
@InterfaceStability.Evolving
public class RestCsrfPreventionFilter implements Filter {
- private static final Logger LOG =
- LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
@@ -87,9 +86,9 @@ public class RestCsrfPreventionFilter implements Filter {
agents = BROWSER_USER_AGENTS_DEFAULT;
}
parseBrowserUserAgents(agents);
- LOG.info("Adding cross-site request forgery (CSRF) protection, "
- + "headerName = {}, methodsToIgnore = {}, browserUserAgents = {}",
- headerName, methodsToIgnore, browserUserAgents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
}
void parseBrowserUserAgents(String userAgents) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 89e44bd..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}
[4/5] hbase git commit: HBASE-15954 REST server should log requests
with TRACE instead of DEBUG
Posted by en...@apache.org.
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/218259c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/218259c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/218259c0
Branch: refs/heads/branch-1.1
Commit: 218259c0edcb7e8d8ee5f6a586c114fa2f33bc7f
Parents: 73a7462
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 14:06:32 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 +-
.../apache/hadoop/hbase/rest/RESTServer.java | 18 +-
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++--
.../hbase/rest/ScannerInstanceResource.java | 32 ++-
.../hadoop/hbase/rest/ScannerResource.java | 18 +-
.../hadoop/hbase/rest/SchemaResource.java | 22 +-
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 +-
.../hadoop/hbase/rest/VersionResource.java | 10 +-
.../apache/hadoop/hbase/rest/client/Client.java | 40 +--
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../rest/filter/RestCsrfPreventionFilter.java | 286 +++++++++++++++++++
.../consumer/ProtobufMessageBodyConsumer.java | 6 +-
.../hadoop/hbase/util/ConnectionCache.java | 6 +-
19 files changed, 434 insertions(+), 123 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index ad8c65d..7ccc6c1 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -127,23 +127,27 @@ public class RESTServer implements Constants {
// check for user-defined port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 0ecaf5a..e49298b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -29,14 +31,13 @@ import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics = new MetricsREST();
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c425e84..2521895 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index ebedf57..f511e03 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -382,7 +382,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -398,7 +398,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -449,7 +449,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -465,7 +465,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -480,7 +480,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
new file mode 100644
index 0000000..dbb1447
--- /dev/null
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.rest.filter;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This filter provides protection against cross site request forgery (CSRF)
+ * attacks for REST APIs. Enabling this filter on an endpoint results in the
+ * requirement that all clients send a particular (configurable) HTTP header
+ * with every request. In the absence of this header the filter will reject the
+ * attempt as a bad request.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RestCsrfPreventionFilter implements Filter {
+
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
+
+ public static final String HEADER_USER_AGENT = "User-Agent";
+ public static final String BROWSER_USER_AGENT_PARAM =
+ "browser-useragents-regex";
+ public static final String CUSTOM_HEADER_PARAM = "custom-header";
+ public static final String CUSTOM_METHODS_TO_IGNORE_PARAM =
+ "methods-to-ignore";
+ static final String BROWSER_USER_AGENTS_DEFAULT = "^Mozilla.*,^Opera.*";
+ public static final String HEADER_DEFAULT = "X-XSRF-HEADER";
+ static final String METHODS_TO_IGNORE_DEFAULT = "GET,OPTIONS,HEAD,TRACE";
+ private String headerName = HEADER_DEFAULT;
+ private Set<String> methodsToIgnore = null;
+ private Set<Pattern> browserUserAgents;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
+ if (customHeader != null) {
+ headerName = customHeader;
+ }
+ String customMethodsToIgnore =
+ filterConfig.getInitParameter(CUSTOM_METHODS_TO_IGNORE_PARAM);
+ if (customMethodsToIgnore != null) {
+ parseMethodsToIgnore(customMethodsToIgnore);
+ } else {
+ parseMethodsToIgnore(METHODS_TO_IGNORE_DEFAULT);
+ }
+
+ String agents = filterConfig.getInitParameter(BROWSER_USER_AGENT_PARAM);
+ if (agents == null) {
+ agents = BROWSER_USER_AGENTS_DEFAULT;
+ }
+ parseBrowserUserAgents(agents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
+ }
+
+ void parseBrowserUserAgents(String userAgents) {
+ String[] agentsArray = userAgents.split(",");
+ browserUserAgents = new HashSet<Pattern>();
+ for (String patternString : agentsArray) {
+ browserUserAgents.add(Pattern.compile(patternString));
+ }
+ }
+
+ void parseMethodsToIgnore(String mti) {
+ String[] methods = mti.split(",");
+ methodsToIgnore = new HashSet<String>();
+ for (int i = 0; i < methods.length; i++) {
+ methodsToIgnore.add(methods[i]);
+ }
+ }
+
+ /**
+ * This method interrogates the User-Agent String and returns whether it
+ * refers to a browser. If it is not a browser, then the requirement for the
+ * CSRF header will not be enforced; if it is a browser, the requirement will
+ * be enforced.
+ * <p>
+ * A User-Agent String is considered to be a browser if it matches
+ * any of the regex patterns from browser-useragent-regex; the default
+ * behavior is to consider everything a browser that matches the following:
+ * "^Mozilla.*,^Opera.*". Subclasses can optionally override
+ * this method to use different behavior.
+ *
+ * @param userAgent The User-Agent String, or null if there isn't one
+ * @return true if the User-Agent String refers to a browser, false if not
+ */
+ protected boolean isBrowser(String userAgent) {
+ if (userAgent == null) {
+ return false;
+ }
+ for (Pattern pattern : browserUserAgents) {
+ Matcher matcher = pattern.matcher(userAgent);
+ if (matcher.matches()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Defines the minimal API requirements for the filter to execute its
+ * filtering logic. This interface exists to facilitate integration in
+ * components that do not run within a servlet container and therefore cannot
+ * rely on a servlet container to dispatch to the {@link #doFilter} method.
+ * Applications that do run inside a servlet container will not need to write
+ * code that uses this interface. Instead, they can use typical servlet
+ * container configuration mechanisms to insert the filter.
+ */
+ public interface HttpInteraction {
+
+ /**
+ * Returns the value of a header.
+ *
+ * @param header name of header
+ * @return value of header
+ */
+ String getHeader(String header);
+
+ /**
+ * Returns the method.
+ *
+ * @return method
+ */
+ String getMethod();
+
+ /**
+ * Called by the filter after it decides that the request may proceed.
+ *
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ void proceed() throws IOException, ServletException;
+
+ /**
+ * Called by the filter after it decides that the request is a potential
+ * CSRF attack and therefore must be rejected.
+ *
+ * @param code status code to send
+ * @param message response message
+ * @throws IOException if there is an I/O error
+ */
+ void sendError(int code, String message) throws IOException;
+ }
+
+ /**
+ * Handles an {@link HttpInteraction} by applying the filtering logic.
+ *
+ * @param httpInteraction caller's HTTP interaction
+ * @throws IOException if there is an I/O error
+ * @throws ServletException if the implementation relies on the servlet API
+ * and a servlet API call has failed
+ */
+ public void handleHttpInteraction(HttpInteraction httpInteraction)
+ throws IOException, ServletException {
+ if (!isBrowser(httpInteraction.getHeader(HEADER_USER_AGENT)) ||
+ methodsToIgnore.contains(httpInteraction.getMethod()) ||
+ httpInteraction.getHeader(headerName) != null) {
+ httpInteraction.proceed();
+ } else {
+ httpInteraction.sendError(HttpServletResponse.SC_BAD_REQUEST,
+ "Missing Required Header for CSRF Vulnerability Protection");
+ }
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response,
+ final FilterChain chain) throws IOException, ServletException {
+ final HttpServletRequest httpRequest = (HttpServletRequest)request;
+ final HttpServletResponse httpResponse = (HttpServletResponse)response;
+ handleHttpInteraction(new ServletFilterHttpInteraction(httpRequest,
+ httpResponse, chain));
+ }
+
+ @Override
+ public void destroy() {
+ }
+
+ /**
+ * Constructs a mapping of configuration properties to be used for filter
+ * initialization. The mapping includes all properties that start with the
+ * specified configuration prefix. Property names in the mapping are trimmed
+ * to remove the configuration prefix.
+ *
+ * @param conf configuration to read
+ * @param confPrefix configuration prefix
+ * @return mapping of configuration properties to be used for filter
+ * initialization
+ */
+ public static Map<String, String> getFilterParams(Configuration conf,
+ String confPrefix) {
+ Map<String, String> filterConfigMap = new HashMap<>();
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(confPrefix)) {
+ String value = conf.get(name);
+ name = name.substring(confPrefix.length());
+ filterConfigMap.put(name, value);
+ }
+ }
+ return filterConfigMap;
+ }
+
+ /**
+ * {@link HttpInteraction} implementation for use in the servlet filter.
+ */
+ private static final class ServletFilterHttpInteraction
+ implements HttpInteraction {
+
+ private final FilterChain chain;
+ private final HttpServletRequest httpRequest;
+ private final HttpServletResponse httpResponse;
+
+ /**
+ * Creates a new ServletFilterHttpInteraction.
+ *
+ * @param httpRequest request to process
+ * @param httpResponse response to process
+ * @param chain filter chain to forward to if HTTP interaction is allowed
+ */
+ public ServletFilterHttpInteraction(HttpServletRequest httpRequest,
+ HttpServletResponse httpResponse, FilterChain chain) {
+ this.httpRequest = httpRequest;
+ this.httpResponse = httpResponse;
+ this.chain = chain;
+ }
+
+ @Override
+ public String getHeader(String header) {
+ return httpRequest.getHeader(header);
+ }
+
+ @Override
+ public String getMethod() {
+ return httpRequest.getMethod();
+ }
+
+ @Override
+ public void proceed() throws IOException, ServletException {
+ chain.doFilter(httpRequest, httpResponse);
+ }
+
+ @Override
+ public void sendError(int code, String message) throws IOException {
+ httpResponse.sendError(code, message);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/218259c0/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index ccd59d4..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}