You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by en...@apache.org on 2016/06/06 22:32:18 UTC
[2/5] hbase git commit: HBASE-15954 REST server should log requests with TRACE instead of DEBUG
HBASE-15954 REST server should log requests with TRACE instead of DEBUG
Conflicts:
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4a0a9a20
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4a0a9a20
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4a0a9a20
Branch: refs/heads/branch-1
Commit: 4a0a9a20dd5bbfdafe2ec95196b449d2e1a45a13
Parents: 878b1ea
Author: Enis Soztutar <en...@apache.org>
Authored: Mon Jun 6 10:58:37 2016 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Mon Jun 6 11:06:52 2016 -0700
----------------------------------------------------------------------
.../hadoop/hbase/rest/MultiRowResource.java | 4 +-
.../hbase/rest/NamespacesInstanceResource.java | 24 ++++-----
.../hadoop/hbase/rest/NamespacesResource.java | 4 +-
.../hbase/rest/ProtobufStreamingUtil.java | 10 ++--
.../apache/hadoop/hbase/rest/RESTServer.java | 18 ++++---
.../apache/hadoop/hbase/rest/RESTServlet.java | 5 +-
.../hadoop/hbase/rest/RegionsResource.java | 4 +-
.../apache/hadoop/hbase/rest/RootResource.java | 4 +-
.../apache/hadoop/hbase/rest/RowResource.java | 54 ++++++++++----------
.../hbase/rest/ScannerInstanceResource.java | 32 +++++++-----
.../hadoop/hbase/rest/ScannerResource.java | 18 +++----
.../hadoop/hbase/rest/SchemaResource.java | 22 ++++----
.../rest/StorageClusterStatusResource.java | 4 +-
.../rest/StorageClusterVersionResource.java | 4 +-
.../apache/hadoop/hbase/rest/TableResource.java | 26 ++++++----
.../hadoop/hbase/rest/VersionResource.java | 10 ++--
.../apache/hadoop/hbase/rest/client/Client.java | 44 ++++++++--------
.../hadoop/hbase/rest/filter/AuthFilter.java | 4 +-
.../rest/filter/RestCsrfPreventionFilter.java | 15 +++---
.../consumer/ProtobufMessageBodyConsumer.java | 6 +--
.../hadoop/hbase/util/ConnectionCache.java | 6 ++-
21 files changed, 171 insertions(+), 147 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index c88ac91..8ff3ef6 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -86,7 +86,9 @@ public class MultiRowResource extends ResourceBase implements Constants {
}
model.addRow(rowModel);
} else {
- LOG.trace("The row : " + rk + " not found in the table.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("The row : " + rk + " not found in the table.");
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
index 8f64738..c832905 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java
@@ -91,8 +91,8 @@ public class NamespacesInstanceResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
@@ -135,8 +135,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response put(final NamespacesInstanceModel model, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, true, uriInfo);
@@ -151,8 +151,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@PUT
public Response putNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -176,8 +176,8 @@ public class NamespacesInstanceResource extends ResourceBase {
public Response post(final NamespacesInstanceModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return processUpdate(model, false, uriInfo);
@@ -192,8 +192,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@POST
public Response postNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try{
@@ -287,8 +287,8 @@ public class NamespacesInstanceResource extends ResourceBase {
@DELETE
public Response deleteNoBody(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
if (servlet.isReadOnly()) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
index 0548fe8..1304fe0 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java
@@ -64,8 +64,8 @@ public class NamespacesResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context ServletContext context, final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
index 93bb940..cb0f4c8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingUtil.java
@@ -49,8 +49,10 @@ public class ProtobufStreamingUtil implements StreamingOutput {
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
- LOG.debug("Created ScanStreamingUtil with content type = " + this.contentType + " user limit : "
- + this.limit + " scan fetch size : " + this.fetchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ + " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
+ }
}
@Override
@@ -82,7 +84,9 @@ public class ProtobufStreamingUtil implements StreamingOutput {
outStream.write(Bytes.toBytes((short)objectBytes.length));
outStream.write(objectBytes);
outStream.flush();
- LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Wrote " + model.getRows().size() + " rows to stream successfully.");
+ }
}
private CellSetModel createModelFromResults(Result[] results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 9dac84a..cb37fb5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -167,23 +167,27 @@ public class RESTServer implements Constants {
// check for user-defined port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("port")) {
String val = commandLine.getOptionValue("port");
- servlet.getConfiguration()
- .setInt("hbase.rest.port", Integer.valueOf(val));
- LOG.debug("port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("port set to " + val);
+ }
}
// check if server should only process GET requests, if so override the conf
if (commandLine != null && commandLine.hasOption("readonly")) {
servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
- LOG.debug("readonly set to true");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("readonly set to true");
+ }
}
// check for user-defined info server port setting, if so override the conf
if (commandLine != null && commandLine.hasOption("infoport")) {
String val = commandLine.getOptionValue("infoport");
- servlet.getConfiguration()
- .setInt("hbase.rest.info.port", Integer.valueOf(val));
- LOG.debug("Web UI port set to " + val);
+ servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Web UI port set to " + val);
+ }
}
@SuppressWarnings("unchecked")
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 4da5c67..411ced8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.rest;
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
@@ -30,14 +32,13 @@ import org.apache.hadoop.hbase.util.ConnectionCache;
import org.apache.hadoop.hbase.util.JvmPauseMonitor;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.log4j.Logger;
/**
* Singleton class encapsulating global REST servlet state and functions.
*/
@InterfaceAudience.Private
public class RESTServlet implements Constants {
- private static final Logger LOG = Logger.getLogger(RESTServlet.class);
+ private static final Log LOG = LogFactory.getLog(RESTServlet.class);
private static RESTServlet INSTANCE;
private final Configuration conf;
private final MetricsREST metrics;
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
index 001c6b5..100dfd5 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
@@ -71,8 +71,8 @@ public class RegionsResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
index c08bb8b..fc4c548 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
@@ -72,8 +72,8 @@ public class RootResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index bd1ea24..4d50c54 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -85,8 +85,8 @@ public class RowResource extends ResourceBase {
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
@@ -130,8 +130,8 @@ public class RowResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
// doesn't make sense to use a non specific coordinate as this can only
@@ -221,8 +221,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
}
puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
}
table = servlet.getTable(tableResource.getName());
@@ -289,8 +289,8 @@ public class RowResource extends ResourceBase {
put.addImmutable(parts[0], parts[1], timestamp, message);
table = servlet.getTable(tableResource.getName());
table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + put.toString());
}
servlet.getMetrics().incrementSucessfulPutRequests(1);
return Response.ok().build();
@@ -301,7 +301,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
}
@@ -311,8 +311,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response put(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, true);
@@ -322,8 +322,8 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response putBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
}
return updateBinary(message, headers, true);
}
@@ -333,8 +333,8 @@ public class RowResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final CellSetModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath()
+ " " + uriInfo.getQueryParameters());
}
return update(model, false);
@@ -344,16 +344,16 @@ public class RowResource extends ResourceBase {
@Consumes(MIMETYPE_BINARY)
public Response postBinary(final byte[] message,
final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
}
return updateBinary(message, headers, false);
}
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
@@ -397,8 +397,8 @@ public class RowResource extends ResourceBase {
table = servlet.getTable(tableResource.getName());
table.delete(delete);
servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + delete.toString());
}
} catch (Exception e) {
servlet.getMetrics().incrementFailedDeleteRequests(1);
@@ -407,7 +407,7 @@ public class RowResource extends ResourceBase {
if (table != null) try {
table.close();
} catch (IOException ioe) {
- LOG.debug(ioe);
+ LOG.debug("Exception received while closing the table", ioe);
}
}
return Response.ok().build();
@@ -499,8 +499,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
}
if (!retValue) {
servlet.getMetrics().incrementFailedPutRequests(1);
@@ -517,7 +517,7 @@ public class RowResource extends ResourceBase {
} finally {
if (table != null) try {
table.close();
- } catch (IOException ioe) {
+ } catch (IOException ioe) {
LOG.debug("Exception received while closing the table", ioe);
}
}
@@ -627,8 +627,8 @@ public class RowResource extends ResourceBase {
.build();
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns "
+ retValue);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
index ffb2fae..2469faa 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
@@ -62,7 +62,7 @@ public class ScannerInstanceResource extends ResourceBase {
public ScannerInstanceResource() throws IOException { }
- public ScannerInstanceResource(String table, String id,
+ public ScannerInstanceResource(String table, String id,
ResultGenerator generator, int batch) throws IOException {
this.id = id;
this.generator = generator;
@@ -72,10 +72,10 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
+ public Response get(final @Context UriInfo uriInfo,
@QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (generator == null) {
@@ -108,7 +108,9 @@ public class ScannerInstanceResource extends ResourceBase {
.build();
}
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
// respond with 204 (No Content) if an empty cell set would be
// returned
if (count == limit) {
@@ -123,7 +125,7 @@ public class ScannerInstanceResource extends ResourceBase {
if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
// if maxRows was given as a query param, stop if we would exceed the
// specified number of rows
- if (maxRows > 0) {
+ if (maxRows > 0) {
if (--maxRows == 0) {
generator.putBack(value);
break;
@@ -134,7 +136,7 @@ public class ScannerInstanceResource extends ResourceBase {
rowModel = new RowModel(rowKey);
}
rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
+ new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
value.getTimestamp(), CellUtil.cloneValue(value)));
} while (--count > 0);
model.addRow(rowModel);
@@ -147,21 +149,23 @@ public class ScannerInstanceResource extends ResourceBase {
@GET
@Produces(MIMETYPE_BINARY)
public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " +
MIMETYPE_BINARY);
}
servlet.getMetrics().incrementRequests(1);
try {
Cell value = generator.next();
if (value == null) {
- LOG.info("generator exhausted");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("generator exhausted");
+ }
return Response.noContent().build();
}
ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
+ response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
+ response.header("X-Column",
Base64.encodeBytes(
KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
response.header("X-Timestamp", value.getTimestamp());
@@ -182,8 +186,8 @@ public class ScannerInstanceResource extends ResourceBase {
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
index 6c424ce..71723d8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
@@ -31,7 +31,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
@@ -91,8 +90,7 @@ public class ScannerResource extends ResourceBase {
spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
model.getEndTime(), model.getMaxVersions());
}
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
+
try {
Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
String tableName = tableResource.getName();
@@ -103,8 +101,8 @@ public class ScannerResource extends ResourceBase {
ScannerInstanceResource instance =
new ScannerInstanceResource(tableName, id, gen, model.getBatch());
scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("new scanner: " + id);
}
UriBuilder builder = uriInfo.getAbsolutePathBuilder();
URI uri = builder.path(id).build();
@@ -130,10 +128,10 @@ public class ScannerResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
+ public Response put(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
return update(model, true, uriInfo);
}
@@ -143,8 +141,8 @@ public class ScannerResource extends ResourceBase {
MIMETYPE_PROTOBUF_IETF})
public Response post(final ScannerModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("POST " + uriInfo.getAbsolutePath());
}
return update(model, false, uriInfo);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
index 9826b67..375643a 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
@@ -86,8 +86,8 @@ public class SchemaResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
@@ -99,7 +99,7 @@ public class SchemaResource extends ResourceBase {
} catch (Exception e) {
servlet.getMetrics().incrementFailedGetRequests(1);
return processException(e);
- }
+ }
}
private Response replace(final TableName name, final TableSchemaModel model,
@@ -198,10 +198,10 @@ public class SchemaResource extends ResourceBase {
@PUT
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response put(final TableSchemaModel model,
+ public Response put(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, true, uriInfo);
@@ -210,10 +210,10 @@ public class SchemaResource extends ResourceBase {
@POST
@Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response post(final TableSchemaModel model,
+ public Response post(final TableSchemaModel model,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("PUT " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
return update(model, false, uriInfo);
@@ -223,8 +223,8 @@ public class SchemaResource extends ResourceBase {
justification="Expected")
@DELETE
public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("DELETE " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
if (servlet.isReadOnly()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
index a7e52bd..27977c3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
@@ -63,8 +63,8 @@ public class StorageClusterStatusResource extends ResourceBase {
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
index 85e81f8..b9fb5d4 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
@@ -58,8 +58,8 @@ public class StorageClusterVersionResource extends ResourceBase {
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f87ef7e..2487a35 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -133,7 +133,7 @@ public class TableResource extends ResourceBase {
@DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
@DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
@DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
- @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
+ @DefaultValue("true") @QueryParam(Constants.SCAN_BATCH_SIZE) boolean cacheBlocks,
@DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
try {
Filter filter = null;
@@ -146,10 +146,12 @@ public class TableResource extends ResourceBase {
tableScan.setStartRow(prefixBytes);
}
}
- LOG.debug("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
- + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
- + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
- + maxVersions + " Batch Size => " + batchSize);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Query parameters : Table Name = > " + this.table + " Start Row => " + startRow
+ + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime
+ + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => "
+ + maxVersions + " Batch Size => " + batchSize);
+ }
Table hTable = RESTServlet.getInstance().getTable(this.table);
tableScan.setBatch(batchSize);
tableScan.setMaxVersions(maxVersions);
@@ -162,15 +164,21 @@ public class TableResource extends ResourceBase {
String[] familysplit = csplit.trim().split(":");
if (familysplit.length == 2) {
if (familysplit[1].length() > 0) {
- LOG.debug("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family and column : " + familysplit[0] + " " + familysplit[1]);
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
} else {
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
- LOG.debug("Scan family : " + familysplit[0] + " and empty qualifier.");
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
+ }
tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
}
- } else if (StringUtils.isNotEmpty(familysplit[0])){
- LOG.debug("Scan family : " + familysplit[0]);
+ } else if (StringUtils.isNotEmpty(familysplit[0])) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Scan family : " + familysplit[0]);
+ }
tableScan.addFamily(Bytes.toBytes(familysplit[0]));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
index ae93825..172246c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
@@ -68,15 +68,15 @@ public class VersionResource extends ResourceBase {
* Build a response for a version request.
* @param context servlet context
* @param uriInfo (JAX-RS context variable) request URL
- * @return a response for a version request
+ * @return a response for a version request
*/
@GET
@Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context ServletContext context,
+ public Response get(final @Context ServletContext context,
final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GET " + uriInfo.getAbsolutePath());
}
servlet.getMetrics().incrementRequests(1);
ResponseBuilder response = Response.ok(new VersionModel(context));
@@ -89,7 +89,7 @@ public class VersionResource extends ResourceBase {
* Dispatch to StorageClusterVersionResource
*/
@Path("cluster")
- public StorageClusterVersionResource getClusterVersionResource()
+ public StorageClusterVersionResource getClusterVersionResource()
throws IOException {
return new StorageClusterVersionResource();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index 142c276..e26de63 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -101,10 +101,10 @@ public class Client {
}
/**
- * Shut down the client. Close any open persistent connections.
+ * Shut down the client. Close any open persistent connections.
*/
public void shutdown() {
- MultiThreadedHttpConnectionManager manager =
+ MultiThreadedHttpConnectionManager manager =
(MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
manager.shutdown();
}
@@ -151,7 +151,7 @@ public class Client {
* one of the members of the supplied cluster definition and iterate through
* the list until a transaction can be successfully completed. The
* definition of success here is a complete HTTP transaction, irrespective
- * of result code.
+ * of result code.
* @param cluster the cluster definition
* @param method the transaction method
* @param headers HTTP header values to send
@@ -209,8 +209,8 @@ public class Client {
long startTime = System.currentTimeMillis();
int code = httpClient.executeMethod(method);
long endTime = System.currentTimeMillis();
- if (LOG.isDebugEnabled()) {
- LOG.debug(method.getName() + " " + uri + " " + code + " " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(method.getName() + " " + uri + " " + code + " " +
method.getStatusText() + " in " + (endTime - startTime) + " ms");
}
return code;
@@ -250,7 +250,7 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -260,14 +260,14 @@ public class Client {
}
/**
- * Send a HEAD request
+ * Send a HEAD request
* @param cluster the cluster definition
* @param path the path or URI
* @param headers the HTTP headers to include in the request
* @return a Response object with response detail
* @throws IOException
*/
- public Response head(Cluster cluster, String path, Header[] headers)
+ public Response head(Cluster cluster, String path, Header[] headers)
throws IOException {
HeadMethod method = new HeadMethod();
try {
@@ -280,7 +280,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @return a Response object with response detail
* @throws IOException
@@ -290,7 +290,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @return a Response object with response detail
@@ -301,7 +301,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param path the path or URI
* @param accept Accept header value
* @return a Response object with response detail
@@ -312,7 +312,7 @@ public class Client {
}
/**
- * Send a GET request
+ * Send a GET request
* @param cluster the cluster definition
* @param path the path or URI
* @param accept Accept header value
@@ -329,7 +329,7 @@ public class Client {
/**
* Send a GET request
* @param path the path or URI
- * @param headers the HTTP headers to include in the request,
+ * @param headers the HTTP headers to include in the request,
* <tt>Accept</tt> must be supplied
* @return a Response object with response detail
* @throws IOException
@@ -346,7 +346,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response get(Cluster c, String path, Header[] headers)
+ public Response get(Cluster c, String path, Header[] headers)
throws IOException {
GetMethod method = new GetMethod();
try {
@@ -396,7 +396,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -413,7 +413,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response put(Cluster cluster, String path, String contentType,
+ public Response put(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -433,7 +433,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(String path, Header[] headers, byte[] content)
+ public Response put(String path, Header[] headers, byte[] content)
throws IOException {
return put(cluster, path, headers, content);
}
@@ -448,7 +448,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response put(Cluster cluster, String path, Header[] headers,
+ public Response put(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PutMethod method = new PutMethod();
try {
@@ -498,7 +498,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content) throws IOException {
Header[] headers = new Header[1];
headers[0] = new Header("Content-Type", contentType);
@@ -515,7 +515,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException for error
*/
- public Response post(Cluster cluster, String path, String contentType,
+ public Response post(Cluster cluster, String path, String contentType,
byte[] content, Header extraHdr) throws IOException {
int cnt = extraHdr == null ? 1 : 2;
Header[] headers = new Header[cnt];
@@ -535,7 +535,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(String path, Header[] headers, byte[] content)
+ public Response post(String path, Header[] headers, byte[] content)
throws IOException {
return post(cluster, path, headers, content);
}
@@ -550,7 +550,7 @@ public class Client {
* @return a Response object with response detail
* @throws IOException
*/
- public Response post(Cluster cluster, String path, Header[] headers,
+ public Response post(Cluster cluster, String path, Header[] headers,
byte[] content) throws IOException {
PostMethod method = new PostMethod();
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
index e5208af..f051bc8 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java
@@ -72,7 +72,9 @@ public class AuthFilter extends AuthenticationFilter {
throw new ServletException("Failed to retrieve server principal", ie);
}
}
- LOG.debug("Setting property " + name + "=" + value);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Setting property " + name + "=" + value);
+ }
name = name.substring(REST_PREFIX_LEN);
props.setProperty(name, value);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 30eea95..dbb1447 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -34,13 +34,12 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* This filter provides protection against cross site request forgery (CSRF)
* attacks for REST APIs. Enabling this filter on an endpoint results in the
@@ -52,8 +51,8 @@ import org.slf4j.LoggerFactory;
@InterfaceStability.Evolving
public class RestCsrfPreventionFilter implements Filter {
- private static final Logger LOG =
- LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
+ private static final Log LOG =
+ LogFactory.getLog(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
@@ -87,9 +86,9 @@ public class RestCsrfPreventionFilter implements Filter {
agents = BROWSER_USER_AGENTS_DEFAULT;
}
parseBrowserUserAgents(agents);
- LOG.info("Adding cross-site request forgery (CSRF) protection, "
- + "headerName = {}, methodsToIgnore = {}, browserUserAgents = {}",
- headerName, methodsToIgnore, browserUserAgents);
+ LOG.info(String.format("Adding cross-site request forgery (CSRF) protection, "
+ + "headerName = %s, methodsToIgnore = %s, browserUserAgents = %s",
+ headerName, methodsToIgnore, browserUserAgents));
}
void parseBrowserUserAgents(String userAgents) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
index ec39db0..073c038 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@Provider
@Consumes({Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF})
@InterfaceAudience.Private
-public class ProtobufMessageBodyConsumer
+public class ProtobufMessageBodyConsumer
implements MessageBodyReader<ProtobufMessageHandler> {
private static final Log LOG =
LogFactory.getLog(ProtobufMessageBodyConsumer.class);
@@ -73,8 +73,8 @@ public class ProtobufMessageBodyConsumer
baos.write(buffer, 0, read);
}
} while (read > 0);
- if (LOG.isDebugEnabled()) {
- LOG.debug(getClass() + ": read " + baos.size() + " bytes from " +
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(getClass() + ": read " + baos.size() + " bytes from " +
inputStream);
}
obj = obj.getObjectFromMessage(baos.toByteArray());
http://git-wip-us.apache.org/repos/asf/hbase/blob/4a0a9a20/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 89e44bd..1475879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -23,6 +23,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
+import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.LogFactory;
/**
* A utility to store user specific HConnections in memory.
@@ -47,7 +48,7 @@ import org.apache.log4j.Logger;
*/
@InterfaceAudience.Private
public class ConnectionCache {
- private static final Logger LOG = Logger.getLogger(ConnectionCache.class);
+ private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
private final Map<String, ConnectionInfo>
connections = new ConcurrentHashMap<String, ConnectionInfo>();
@@ -60,6 +61,7 @@ public class ConnectionCache {
private final ThreadLocal<String> effectiveUserNames =
new ThreadLocal<String>() {
+ @Override
protected String initialValue() {
return realUserName;
}