You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by zh...@apache.org on 2023/05/21 12:23:33 UTC

[hbase] branch branch-2 updated (e70b4685191 -> 6624f0761fc)

This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


    from e70b4685191 HBASE-27870 Eliminate the 'WARNING: package jdk.internal.util.random not in java.base' when running UTs with jdk11 (#5242)
     new bf07ff4013c HBASE-27812 Provide option in HBase UI to disable stack trace for security (#5205)
     new 01fb688a4a9 HBASE-27848: Should fast-fail if unmatched column family exists when using ImportTsv (#5225)
     new 6624f0761fc HBASE-27634 Builds emit errors related to SBOM parsing (#5246)

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../org/apache/hadoop/hbase/http/HttpServer.java   | 10 ++++++++++
 .../apache/hadoop/hbase/mapreduce/ImportTsv.java   | 17 +++++++++++++++++
 .../hadoop/hbase/mapreduce/TestImportTsv.java      | 22 ++++++++++++++++++++++
 pom.xml                                            |  2 +-
 4 files changed, 50 insertions(+), 1 deletion(-)


[hbase] 02/03: HBASE-27848: Should fast-fail if unmatched column family exists when using ImportTsv (#5225)

Posted by zh...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit 01fb688a4a90303c56335d1150e26a336e429054
Author: guluo <lu...@qq.com>
AuthorDate: Sun May 21 19:47:27 2023 +0800

    HBASE-27848: Should fast-fail if unmatched column family exists when using ImportTsv (#5225)
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
    (cherry picked from commit ce29f97a809a849bf067fa3571fd775fb596fc10)
---
 .../apache/hadoop/hbase/mapreduce/ImportTsv.java   | 17 +++++++++++++++++
 .../hadoop/hbase/mapreduce/TestImportTsv.java      | 22 ++++++++++++++++++++++
 2 files changed, 39 insertions(+)

diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index 665ff93a977..d7833fabeaf 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -554,6 +555,22 @@ public class ImportTsv extends Configured implements Tool {
             LOG.error(errorMsg);
             throw new TableNotFoundException(errorMsg);
           }
+          try (Table table = connection.getTable(tableName)) {
+            ArrayList<String> unmatchedFamilies = new ArrayList<>();
+            Set<String> cfSet = getColumnFamilies(columns);
+            TableDescriptor tDesc = table.getDescriptor();
+            for (String cf : cfSet) {
+              if (!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
+                unmatchedFamilies.add(cf);
+              }
+            }
+            if (unmatchedFamilies.size() > 0) {
+              String noSuchColumnFamiliesMsg =
+                format("Column families: %s do not exist.", unmatchedFamilies);
+              LOG.error(noSuchColumnFamiliesMsg);
+              throw new NoSuchColumnFamilyException(noSuchColumnFamiliesMsg);
+            }
+          }
           if (mapperClass.equals(TsvImporterTextMapper.class)) {
             usage(TsvImporterTextMapper.class.toString()
               + " should not be used for non bulkloading case. use "
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 737ae178b63..8a30e404cff 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -241,6 +242,27 @@ public class TestImportTsv implements Configurable {
       }, args));
   }
 
+  @Test
+  public void testMRNoMatchedColumnFamily() throws Exception {
+    util.createTable(tn, FAMILY);
+
+    String[] args = new String[] {
+      "-D" + ImportTsv.COLUMNS_CONF_KEY
+        + "=HBASE_ROW_KEY,FAM:A,FAM01_ERROR:A,FAM01_ERROR:B,FAM02_ERROR:C",
+      tn.getNameAsString(), "/inputFile" };
+    exception.expect(NoSuchColumnFamilyException.class);
+    assertEquals("running test job configuration failed.", 0,
+      ToolRunner.run(new Configuration(util.getConfiguration()), new ImportTsv() {
+        @Override
+        public int run(String[] args) throws Exception {
+          createSubmittableJob(getConf(), args);
+          return 0;
+        }
+      }, args));
+
+    util.deleteTable(tn);
+  }
+
   @Test
   public void testMRWithoutAnExistingTable() throws Exception {
     String[] args = new String[] { tn.getNameAsString(), "/inputFile" };


[hbase] 01/03: HBASE-27812 Provide option in HBase UI to disable stack trace for security (#5205)

Posted by zh...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit bf07ff4013c1d4e3bf2bc7a6460c99a6911f1678
Author: Yash Dodeja <ya...@yahoo.com>
AuthorDate: Sun May 21 17:16:10 2023 +0530

    HBASE-27812 Provide option in HBase UI to disable stack trace for security (#5205)
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
    (cherry picked from commit cf9684de445c9543bc0a8cc49d1eff17c9833990)
---
 .../src/main/java/org/apache/hadoop/hbase/http/HttpServer.java | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index ce1b387bc15..6c2b71a0b90 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -81,6 +81,7 @@ import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Server;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.ServerConnector;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SslConnectionFactory;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ErrorHandler;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.HandlerCollection;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.RequestLogHandler;
 import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.gzip.GzipHandler;
@@ -153,6 +154,7 @@ public class HttpServer implements FilterContainer {
   public static final String SPNEGO_PROXYUSER_FILTER = "SpnegoProxyUserFilter";
   public static final String NO_CACHE_FILTER = "NoCacheFilter";
   public static final String APP_DIR = "webapps";
+  public static final String HTTP_UI_SHOW_STACKTRACE_KEY = "hbase.ui.show-stack-traces";
 
   public static final String METRIC_SERVLETS_CONF_KEY = "hbase.http.metrics.servlets";
   public static final String[] METRICS_SERVLETS_DEFAULT = { "jmx", "metrics", "prometheus" };
@@ -652,6 +654,14 @@ public class HttpServer implements FilterContainer {
         addFilterPathMapping(path, webAppContext);
       }
     }
+    // Check if disable stack trace property is configured
+    if (!conf.getBoolean(HTTP_UI_SHOW_STACKTRACE_KEY, true)) {
+      // Disable stack traces for server errors in UI
+      webServer.setErrorHandler(new ErrorHandler());
+      webServer.getErrorHandler().setShowStacks(false);
+      // Disable stack traces for web app errors in UI
+      webAppContext.getErrorHandler().setShowStacks(false);
+    }
   }
 
   private void addManagedListener(ServerConnector connector) {


[hbase] 03/03: HBASE-27634 Builds emit errors related to SBOM parsing (#5246)

Posted by zh...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit 6624f0761fc462d0384e982e3d9ef08a05b09c01
Author: Shuhei Yamasaki <ya...@oss.nttdata.com>
AuthorDate: Sun May 21 20:52:07 2023 +0900

    HBASE-27634 Builds emit errors related to SBOM parsing (#5246)
    
    Update CycloneDX version
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
    (cherry picked from commit 49bdf6140eabcdbe2b1619bfb0f4de7f69b32c15)
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 4db152270c1..c99097478ca 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2562,7 +2562,7 @@
       <plugin>
         <groupId>org.cyclonedx</groupId>
         <artifactId>cyclonedx-maven-plugin</artifactId>
-        <version>2.7.3</version>
+        <version>2.7.6</version>
         <executions>
           <execution>
             <goals>