You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ro...@apache.org on 2019/09/05 12:52:18 UTC

[hadoop] 02/05: YARN-7537. Add ability to load hbase config from distributed file system. Contributed by Prabhu Joseph

This is an automated email from the ASF dual-hosted git repository.

rohithsharmaks pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git

commit 6110af2d1df75d37a2d4e4095e132df23c4ed429
Author: Eric Yang <ey...@apache.org>
AuthorDate: Tue Jun 4 19:26:06 2019 -0400

    YARN-7537.  Add ability to load hbase config from distributed file system.
                Contributed by Prabhu Joseph
---
 .../pom.xml                                        | 13 ++++
 .../storage/common/HBaseTimelineStorageUtils.java  | 32 ++++++----
 .../common/TestHBaseTimelineStorageUtils.java      | 74 ++++++++++++++++++++++
 3 files changed, 106 insertions(+), 13 deletions(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
index ff7fb6e..74af322 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
@@ -76,6 +76,19 @@
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-api</artifactId>
       <scope>provided</scope>
     </dependency>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStor [...]
index f4cd6fb..93feb82 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
@@ -17,11 +17,13 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 
-import java.net.MalformedURLException;
-import java.net.URL;
+import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Query;
@@ -40,7 +42,6 @@ public final class HBaseTimelineStorageUtils {
   private HBaseTimelineStorageUtils() {
   }
 
-
   /**
    * @param conf YARN configuration. Used to see if there is an explicit config
    *          pointing to the HBase config file to read. It should not be null
@@ -48,28 +49,33 @@ public final class HBaseTimelineStorageUtils {
    * @return a configuration with the HBase configuration from the classpath,
    *         optionally overwritten by the timeline service configuration URL if
    *         specified.
-   * @throws MalformedURLException if a timeline service HBase configuration URL
-   *           is specified but is a malformed URL.
+   * @throws IOException if a timeline service HBase configuration URL
+   *           is specified but unable to read it.
    */
   public static Configuration getTimelineServiceHBaseConf(Configuration conf)
-      throws MalformedURLException {
+      throws IOException {
     if (conf == null) {
       throw new NullPointerException();
     }
 
     Configuration hbaseConf;
-    String timelineServiceHBaseConfFileURL =
+    String timelineServiceHBaseConfFilePath =
         conf.get(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE);
-    if (timelineServiceHBaseConfFileURL != null
-        && timelineServiceHBaseConfFileURL.length() > 0) {
+
+    if (timelineServiceHBaseConfFilePath != null
+          && timelineServiceHBaseConfFilePath.length() > 0) {
       LOG.info("Using hbase configuration at " +
-          timelineServiceHBaseConfFileURL);
+          timelineServiceHBaseConfFilePath);
       // create a clone so that we don't mess with our input one
       hbaseConf = new Configuration(conf);
       Configuration plainHBaseConf = new Configuration(false);
-      URL hbaseSiteXML = new URL(timelineServiceHBaseConfFileURL);
-      plainHBaseConf.addResource(hbaseSiteXML);
-      HBaseConfiguration.merge(hbaseConf, plainHBaseConf);
+      Path hbaseConfigPath = new Path(timelineServiceHBaseConfFilePath);
+      try (FileSystem fs =
+          FileSystem.newInstance(hbaseConfigPath.toUri(), conf);
+          FSDataInputStream in = fs.open(hbaseConfigPath)) {
+        plainHBaseConf.addResource(in);
+        HBaseConfiguration.merge(hbaseConf, plainHBaseConf);
+      }
     } else {
       // default to what is on the classpath
       hbaseConf = HBaseConfiguration.create(conf);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTime [...]
index 402a89b..46bb8ae 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestHBaseTimelineStorageUtils.java
@@ -18,16 +18,90 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
 /**
  * Unit tests for HBaseTimelineStorageUtils static methods.
  */
 public class TestHBaseTimelineStorageUtils {
 
+  private String hbaseConfigPath = "target/hbase-site.xml";
+
+  @Before
+  public void setup() throws IOException {
+    // Input Hbase Configuration
+    Configuration hbaseConf = new Configuration();
+    hbaseConf.set("input", "test");
+
+    //write the document to a buffer (not directly to the file, as that
+    //can cause the file being written to get read, which will then fail).
+    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
+    hbaseConf.writeXml(bytesOut);
+    bytesOut.close();
+
+    //write the bytes to the file
+    File file = new File(hbaseConfigPath);
+    OutputStream os = new FileOutputStream(file);
+    os.write(bytesOut.toByteArray());
+    os.close();
+  }
+
   @Test(expected=NullPointerException.class)
   public void testGetTimelineServiceHBaseConfNullArgument() throws Exception {
     HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(null);
   }
 
+  @Test
+  public void testWithHbaseConfAtLocalFileSystem() throws IOException {
+    // Verifying With Hbase Conf from Local FileSystem
+    Configuration conf = new Configuration();
+    conf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE,
+        hbaseConfigPath);
+    Configuration hbaseConfFromLocal =
+        HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(conf);
+    Assert.assertEquals("Failed to read hbase config from Local FileSystem",
+        "test", hbaseConfFromLocal.get("input"));
+  }
+
+  @Test
+  public void testWithHbaseConfAtHdfsFileSystem() throws IOException {
+    MiniDFSCluster hdfsCluster = null;
+    try {
+      HdfsConfiguration hdfsConfig = new HdfsConfiguration();
+      hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig)
+          .numDataNodes(1).build();
+
+      FileSystem fs = hdfsCluster.getFileSystem();
+      Path path = new Path("/tmp/hdfs-site.xml");
+      fs.copyFromLocalFile(new Path(hbaseConfigPath), path);
+
+      // Verifying With Hbase Conf from HDFS FileSystem
+      Configuration conf = new Configuration(hdfsConfig);
+      conf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_CONFIGURATION_FILE,
+          path.toString());
+      Configuration hbaseConfFromHdfs =
+          HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(conf);
+      Assert.assertEquals("Failed to read hbase config from Hdfs FileSystem",
+          "test", hbaseConfFromHdfs.get("input"));
+    } finally {
+      if (hdfsCluster != null) {
+        hdfsCluster.shutdown();
+      }
+    }
+  }
+
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org