You are viewing a plain text version of this content. The canonical (HTML) version is available in the Apache mailing list archive.
Posted to commits@hbase.apache.org by nd...@apache.org on 2021/04/15 20:59:31 UTC
[hbase] branch branch-2.3 updated: HBASE-25770 Http InfoServers should honor gzip encoding when requested (#3159)
This is an automated email from the ASF dual-hosted git repository.
ndimiduk pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.3 by this push:
new 545330e HBASE-25770 Http InfoServers should honor gzip encoding when requested (#3159)
545330e is described below
commit 545330ecbd8c94cac50424dc4b72642e3eacda59
Author: Nick Dimiduk <nd...@apache.org>
AuthorDate: Thu Apr 15 09:07:13 2021 -0700
HBASE-25770 Http InfoServers should honor gzip encoding when requested (#3159)
Signed-off-by: Duo Zhang <zh...@apache.org>
Signed-off-by: Josh Elser <el...@apache.org>
---
hbase-http/pom.xml | 5 ++
.../org/apache/hadoop/hbase/http/HttpServer.java | 19 ++++++
.../apache/hadoop/hbase/http/TestHttpServer.java | 68 +++++++++++++++++++++-
.../org/apache/hadoop/hbase/master/HMaster.java | 5 +-
4 files changed, 95 insertions(+), 2 deletions(-)
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index ecf5c2c..7e50f8d 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -249,6 +249,11 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>org.hamcrest</groupId>
+ <artifactId>hamcrest-library</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index f3c4a59..51a8af5 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -77,6 +77,7 @@ import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.FilterMapping;
@@ -577,6 +578,7 @@ public class HttpServer implements FilterContainer {
this.findPort = b.findPort;
this.authenticationEnabled = b.securityEnabled;
initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b);
+ this.webServer.setHandler(buildGzipHandler(this.webServer.getHandler()));
}
private void initializeWebServer(String name, String hostName,
@@ -664,6 +666,23 @@ public class HttpServer implements FilterContainer {
return ctx;
}
+ /**
+ * Construct and configure an instance of {@link GzipHandler}. With complex
+ * multi-{@link WebAppContext} configurations, it's easiest to apply this handler directly to the
+ * instance of {@link Server} near the end of its configuration, something like
+ * <pre>
+ * Server server = new Server();
+ * //...
+ * server.setHandler(buildGzipHandler(server.getHandler()));
+ * server.start();
+ * </pre>
+ */
+ public static GzipHandler buildGzipHandler(final Handler wrapped) {
+ final GzipHandler gzipHandler = new GzipHandler();
+ gzipHandler.setHandler(wrapped);
+ return gzipHandler;
+ }
+
private static void addNoCacheFilter(WebAppContext ctxt) {
defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
Collections.<String, String> emptyMap(), new String[] { "/*" });
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 881c66a..01bc8c5 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,11 +17,16 @@
*/
package org.apache.hadoop.hbase.http;
+import static org.hamcrest.Matchers.greaterThan;
+import java.io.BufferedReader;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
+import java.nio.CharBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
@@ -58,6 +63,13 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.util.ajax.JSON;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHeaders;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -268,6 +280,60 @@ public class TestHttpServer extends HttpServerFunctionalTest {
// assertEquals("text/html; charset=utf-8", conn.getContentType());
}
+ @Test
+ public void testNegotiatesEncodingGzip() throws IOException {
+ final InputStream stream = ClassLoader.getSystemResourceAsStream("webapps/static/test.css");
+ assertNotNull(stream);
+ final String sourceContent = readFully(stream);
+
+ try (final CloseableHttpClient client = HttpClients.createMinimal()) {
+ final HttpGet request = new HttpGet(new URL(baseUrl, "/static/test.css").toString());
+
+ request.setHeader(HttpHeaders.ACCEPT_ENCODING, null);
+ final long unencodedContentLength;
+ try (final CloseableHttpResponse response = client.execute(request)) {
+ final HttpEntity entity = response.getEntity();
+ assertNotNull(entity);
+ assertNull(entity.getContentEncoding());
+ unencodedContentLength = entity.getContentLength();
+ MatcherAssert.assertThat(unencodedContentLength, greaterThan(0L));
+ final String unencodedEntityBody = readFully(entity.getContent());
+ assertEquals(sourceContent, unencodedEntityBody);
+ }
+
+ request.setHeader(HttpHeaders.ACCEPT_ENCODING, "gzip");
+ final long encodedContentLength;
+ try (final CloseableHttpResponse response = client.execute(request)) {
+ final HttpEntity entity = response.getEntity();
+ assertNotNull(entity);
+ assertNotNull(entity.getContentEncoding());
+ assertEquals("gzip", entity.getContentEncoding().getValue());
+ encodedContentLength = entity.getContentLength();
+ MatcherAssert.assertThat(encodedContentLength, greaterThan(0L));
+ final String encodedEntityBody = readFully(entity.getContent());
+ // the encoding/decoding process, as implemented in this specific combination of dependency
+ // versions, does not perfectly preserve trailing whitespace. thus, `trim()`.
+ assertEquals(sourceContent.trim(), encodedEntityBody.trim());
+ }
+ MatcherAssert.assertThat(unencodedContentLength, greaterThan(encodedContentLength));
+ }
+ }
+
+ private static String readFully(final InputStream input) throws IOException {
+ // TODO: when the time comes, delete me and replace with a JDK11 IO helper API.
+ try (final BufferedReader reader = new BufferedReader(new InputStreamReader(input))) {
+ final StringBuilder sb = new StringBuilder();
+ final CharBuffer buffer = CharBuffer.allocate(1024 * 2);
+ while (reader.read(buffer) > 0) {
+ sb.append(buffer);
+ buffer.clear();
+ }
+ return sb.toString();
+ } finally {
+ input.close();
+ }
+ }
+
/**
* Dummy filter that mimics as an authentication filter. Obtains user identity
* from the request parameter user.name. Wraps around the request so that
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 5f8b63d..f7c2d8e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -95,6 +95,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.executor.ExecutorType;
import org.apache.hadoop.hbase.favored.FavoredNodesManager;
import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
+import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
@@ -660,7 +661,8 @@ public class HMaster extends HRegionServer implements MasterServices {
if (infoPort < 0 || infoServer == null) {
return -1;
}
- if(infoPort == infoServer.getPort()) {
+ if (infoPort == infoServer.getPort()) {
+ // server is already running
return infoPort;
}
final String addr = conf.get("hbase.master.info.bindAddress", "0.0.0.0");
@@ -682,6 +684,7 @@ public class HMaster extends HRegionServer implements MasterServices {
connector.setPort(infoPort);
masterJettyServer.addConnector(connector);
masterJettyServer.setStopAtShutdown(true);
+ masterJettyServer.setHandler(HttpServer.buildGzipHandler(masterJettyServer.getHandler()));
final String redirectHostname =
StringUtils.isBlank(useThisHostnameInstead) ? null : useThisHostnameInstead;