You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by xy...@apache.org on 2018/09/20 22:22:10 UTC

[48/50] [abbrv] hadoop git commit: HDDS-100. SCM CA: generate public/private key pair for SCM/OM/DNs. Contributed by Ajay Kumar.

HDDS-100. SCM CA: generate public/private key pair for SCM/OM/DNs. Contributed by Ajay Kumar.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f8f02f28
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f8f02f28
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f8f02f28

Branch: refs/heads/HDDS-4
Commit: f8f02f2866728e8113dbe366f3fcf674a8630d11
Parents: 249df9a
Author: Xiaoyu Yao <xy...@apache.org>
Authored: Fri Jun 8 08:33:58 2018 -0700
Committer: Xiaoyu Yao <xy...@apache.org>
Committed: Thu Sep 20 15:13:07 2018 -0700

----------------------------------------------------------------------
 hadoop-hdds/common/pom.xml                      |   6 +-
 .../org/apache/hadoop/hdds/HddsConfigKeys.java  |  19 ++
 .../hdds/security/x509/HDDSKeyGenerator.java    |  99 ++++++++
 .../hdds/security/x509/HDDSKeyPEMWriter.java    | 254 +++++++++++++++++++
 .../hdds/security/x509/SecurityConfig.java      | 190 ++++++++++++++
 .../hadoop/hdds/security/x509/package-info.java |  25 ++
 .../common/src/main/resources/ozone-default.xml |  40 +++
 .../security/x509/TestHDDSKeyGenerator.java     |  81 ++++++
 .../security/x509/TestHDDSKeyPEMWriter.java     | 213 ++++++++++++++++
 .../ozone/TestOzoneConfigurationFields.java     |   6 +
 10 files changed, 932 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml
index d08a5a9..9336196 100644
--- a/hadoop-hdds/common/pom.xml
+++ b/hadoop-hdds/common/pom.xml
@@ -77,7 +77,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>rocksdbjni</artifactId>
       <version>5.14.2</version>
     </dependency>
-
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
@@ -106,6 +105,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <version>2.6.0</version>
     </dependency>
 
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk15on</artifactId>
+      <version>1.49</version>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
index 856d113..adadcd2 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
@@ -94,4 +94,22 @@ public final class HddsConfigKeys {
      "hdds.lock.max.concurrency";
  public static final int HDDS_LOCK_MAX_CONCURRENCY_DEFAULT = 100;
 
+  public static final String HDDS_KEY_LEN = "hdds.key.len";
+  public static final int HDDS_DEFAULT_KEY_LEN = 2048;
+  public static final String HDDS_KEY_ALGORITHM = "hdds.key.algo";
+  public static final String HDDS_DEFAULT_KEY_ALGORITHM = "RSA";
+  public static final String HDDS_SECURITY_PROVIDER = "hdds.security.provider";
+  public static final String HDDS_DEFAULT_SECURITY_PROVIDER = "BC";
+  public static final String HDDS_KEY_DIR_NAME = "hdds.key.dir.name";
+  public static final String HDDS_KEY_DIR_NAME_DEFAULT = "keys";
+
+  // TODO : Talk to StorageIO classes and see if they can return a secure
+  // storage location for each node.
+  public static final String HDDS_METADATA_DIR_NAME = "hdds.metadata.dir";
+  public static final String HDDS_PRIVATE_KEY_FILE_NAME =
+      "hdds.priv.key.file.name";
+  public static final String HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT = "private.pem";
+  public static final String HDDS_PUBLIC_KEY_FILE_NAME = "hdds.public.key.file"
+      + ".name";
+  public static final String HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT = "public.pem";
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyGenerator.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyGenerator.java
new file mode 100644
index 0000000..cb411b2
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyGenerator.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.hadoop.hdds.security.x509;
+
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+
+/** A class to generate Key Pair for use with Certificates. */
+public class HDDSKeyGenerator {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HDDSKeyGenerator.class);
+  private final SecurityConfig securityConfig;
+
+  /**
+   * Constructor for HDDSKeyGenerator.
+   *
+   * @param configuration - config
+   */
+  public HDDSKeyGenerator(Configuration configuration) {
+    this.securityConfig = new SecurityConfig(configuration);
+  }
+
+  /**
+   * Returns the Security config used for this object.
+   * @return SecurityConfig
+   */
+  public SecurityConfig getSecurityConfig() {
+    return securityConfig;
+  }
+
+  /**
+   * Use Config to generate key.
+   *
+   * @return KeyPair
+   * @throws NoSuchProviderException
+   * @throws NoSuchAlgorithmException
+   */
+  public KeyPair generateKey() throws NoSuchProviderException,
+      NoSuchAlgorithmException {
+    return generateKey(securityConfig.getSize(),
+        securityConfig.getAlgo(), securityConfig.getProvider());
+  }
+
+  /**
+   * Specify the size -- all other parameters are used from config.
+   *
+   * @param size - int, valid key sizes.
+   * @return KeyPair
+   * @throws NoSuchProviderException
+   * @throws NoSuchAlgorithmException
+   */
+  public KeyPair generateKey(int size) throws
+      NoSuchProviderException, NoSuchAlgorithmException {
+    return generateKey(size,
+        securityConfig.getAlgo(), securityConfig.getProvider());
+  }
+
+  /**
+   * Custom Key Generation, all values are user provided.
+   *
+   * @param size - Key Size
+   * @param algorithm - Algorithm to use
+   * @param provider - Security provider.
+   * @return KeyPair.
+   * @throws NoSuchProviderException
+   * @throws NoSuchAlgorithmException
+   */
+  public KeyPair generateKey(int size, String algorithm, String provider)
+      throws NoSuchProviderException, NoSuchAlgorithmException {
+    LOG.info("Generating key pair using size:{}, Algorithm:{}, Provider:{}",
+        size, algorithm, provider);
+    KeyPairGenerator generator = KeyPairGenerator
+        .getInstance(algorithm, provider);
+    generator.initialize(size);
+    return generator.generateKeyPair();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyPEMWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyPEMWriter.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyPEMWriter.java
new file mode 100644
index 0000000..6ca7584
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/HDDSKeyPEMWriter.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.hadoop.hdds.security.x509;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import org.apache.commons.io.output.FileWriterWithEncoding;
+import org.apache.hadoop.conf.Configuration;
+import org.bouncycastle.util.io.pem.PemObject;
+import org.bouncycastle.util.io.pem.PemWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.security.KeyPair;
+import java.util.Set;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
+import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
+import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
+
+/**
+ * We store all Key material in good old PEM files.
+ * This helps in avoiding dealing with persistent
+ * Java KeyStore issues. Also when debugging,
+ * general tools like OpenSSL can be used to read and
+ * decode these files.
+ */
+public class HDDSKeyPEMWriter {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HDDSKeyPEMWriter.class);
+  private final Path location;
+  private final SecurityConfig securityConfig;
+  private Set<PosixFilePermission> permissionSet =
+      Stream.of(OWNER_READ, OWNER_WRITE,  OWNER_EXECUTE)
+          .collect(Collectors.toSet());
+  private Supplier<Boolean> isPosixFileSystem;
+  public final static String PRIVATE_KEY = "PRIVATE KEY";
+  public final static String PUBLIC_KEY = "PUBLIC KEY";
+  public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
+  /*
+    Creates an HDDS Key Writer.
+
+    @param configuration - Configuration
+   */
+  public HDDSKeyPEMWriter(Configuration configuration) throws IOException {
+    Preconditions.checkNotNull(configuration, "Config cannot be null");
+    this.securityConfig = new SecurityConfig(configuration);
+    isPosixFileSystem = HDDSKeyPEMWriter::isPosix;
+    this.location = securityConfig.getKeyLocation();
+  }
+
+  /**
+   * Checks if File System supports posix style security permissions.
+   *
+   * @return True if it supports posix.
+   */
+  private static Boolean isPosix() {
+    return FileSystems.getDefault().supportedFileAttributeViews()
+        .contains("posix");
+  }
+
+  /**
+   * Returns the Permission set.
+   * @return Set
+   */
+  @VisibleForTesting
+  public Set<PosixFilePermission> getPermissionSet() {
+    return permissionSet;
+  }
+
+  /**
+   * Returns the Security config used for this object.
+   * @return SecurityConfig
+   */
+  public SecurityConfig getSecurityConfig() {
+    return securityConfig;
+  }
+
+  /**
+   * This function is used only for testing.
+   *
+   * @param isPosixFileSystem - Sets a boolean function for mimicking
+   * files systems that are not posix.
+   */
+  @VisibleForTesting
+  public void setIsPosixFileSystem(Supplier<Boolean> isPosixFileSystem) {
+    this.isPosixFileSystem = isPosixFileSystem;
+  }
+
+  /**
+   * Writes a given key using the default config options.
+   *
+   * @param keyPair - Key Pair to write to file.
+   * @throws IOException
+   */
+  public void writeKey(KeyPair keyPair) throws IOException {
+    writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
+        securityConfig.getPublicKeyName(), false);
+  }
+
+  /**
+   * Writes a given key using default config options.
+   *
+   * @param keyPair - Key pair to write
+   * @param overwrite - Overwrites the keys if they already exist.
+   * @throws IOException
+   */
+  public void writeKey(KeyPair keyPair, boolean overwrite) throws IOException {
+    writeKey(location, keyPair, securityConfig.getPrivateKeyName(),
+        securityConfig.getPublicKeyName(), overwrite);
+  }
+
+  /**
+   * Writes a given key using default config options.
+   *
+   * @param basePath - The location to write to, override the config values.
+   * @param keyPair - Key pair to write
+   * @param overwrite - Overwrites the keys if they already exist.
+   * @throws IOException
+   */
+  public void writeKey(Path basePath, KeyPair keyPair, boolean overwrite)
+      throws IOException {
+    writeKey(basePath, keyPair, securityConfig.getPrivateKeyName(),
+        securityConfig.getPublicKeyName(), overwrite);
+  }
+
+  /**
+   * Helper function that actually writes data to the files.
+   *
+   * @param basePath - base path to write key
+   * @param keyPair - Key pair to write to file.
+   * @param privateKeyFileName - private key file name.
+   * @param publicKeyFileName - public key file name.
+   * @param force - forces overwriting the keys.
+   * @throws IOException
+   */
+  private synchronized void writeKey(Path basePath, KeyPair keyPair,
+      String privateKeyFileName, String publicKeyFileName, boolean force)
+      throws IOException {
+    checkPreconditions(basePath);
+
+    File privateKeyFile =
+        Paths.get(location.toString(), privateKeyFileName).toFile();
+    File publicKeyFile =
+        Paths.get(location.toString(), publicKeyFileName).toFile();
+    checkKeyFile(privateKeyFile, force, publicKeyFile);
+
+    try (PemWriter privateKeyWriter = new PemWriter(new
+        FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
+      privateKeyWriter.writeObject(
+          new PemObject(PRIVATE_KEY, keyPair.getPrivate().getEncoded()));
+    }
+
+    try (PemWriter publicKeyWriter = new PemWriter(new
+        FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
+      publicKeyWriter.writeObject(
+          new PemObject(PUBLIC_KEY, keyPair.getPublic().getEncoded()));
+    }
+    Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
+    Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
+  }
+
+  /**
+   * Checks if private and public key file already exists. Throws IOException
+   * if file exists and force flag is set to false, else will delete the
+   * existing file.
+   *
+   * @param privateKeyFile - Private key file.
+   * @param force - forces overwriting the keys.
+   * @param publicKeyFile - public key file.
+   * @throws IOException
+   */
+  private void checkKeyFile(File privateKeyFile, boolean force,
+      File publicKeyFile) throws IOException {
+    if (privateKeyFile.exists() && force) {
+      if (!privateKeyFile.delete()) {
+        throw new IOException("Unable to delete private key file.");
+      }
+    }
+
+    if (publicKeyFile.exists() && force) {
+      if (!publicKeyFile.delete()) {
+        throw new IOException("Unable to delete public key file.");
+      }
+    }
+
+    if (privateKeyFile.exists()) {
+      throw new IOException("Private Key file already exists.");
+    }
+
+    if (publicKeyFile.exists()) {
+      throw new IOException("Public Key file already exists.");
+    }
+  }
+
+  /**
+   * Checks if base path exists and sets file permissions.
+   *
+   * @param basePath - base path to write key
+   * @throws IOException
+   */
+  private void checkPreconditions(Path basePath) throws IOException {
+    Preconditions.checkNotNull(basePath, "Base path cannot be null");
+    if (!isPosixFileSystem.get()) {
+      LOG.error("Keys cannot be stored securely without POSIX file system "
+          + "support for now.");
+      throw new IOException("Unsupported File System for pem file.");
+    }
+
+    if (Files.exists(basePath)) {
+      // Not the end of the world if we reset the permissions on an existing
+      // directory.
+      Files.setPosixFilePermissions(basePath, permissionSet);
+    } else {
+      boolean success = basePath.toFile().mkdirs();
+      if (!success) {
+        LOG.error("Unable to create the directory for the "
+            + "location. Location: {}", basePath);
+        throw new IOException("Unable to create the directory for the "
+            + "location. Location:" + basePath);
+      }
+      Files.setPosixFilePermissions(basePath, permissionSet);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
new file mode 100644
index 0000000..896a379
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.bouncycastle.jce.provider.BouncyCastleProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.Provider;
+import java.security.Security;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_LEN;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_KEY_ALGORITHM;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DEFAULT_SECURITY_PROVIDER;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_ALGORITHM;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_DIR_NAME;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_DIR_NAME_DEFAULT;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_KEY_LEN;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_SECURITY_PROVIDER;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+
+/**
+ * A class that deals with all Security related configs in HDDS.
+ * It is easier to have all Java code related to config in a single place.
+ */
+public class SecurityConfig {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(SecurityConfig.class);
+  private static volatile Provider provider;
+  private final Configuration configuration;
+  private final int size;
+  private final String algo;
+  private final String providerString;
+  private final String metadatDir;
+  private final String keyDir;
+  private final String privateKeyName;
+  private final String publicKeyName;
+
+  /**
+   * Constructs a HDDSKeyGenerator.
+   *
+   * @param configuration - HDDS Configuration
+   */
+  public SecurityConfig(Configuration configuration) {
+    Preconditions.checkNotNull(configuration, "Configuration cannot be null");
+    this.configuration = configuration;
+    this.size = this.configuration.getInt(HDDS_KEY_LEN, HDDS_DEFAULT_KEY_LEN);
+    this.algo = this.configuration.get(HDDS_KEY_ALGORITHM,
+        HDDS_DEFAULT_KEY_ALGORITHM);
+    this.providerString = this.configuration.get(HDDS_SECURITY_PROVIDER,
+            HDDS_DEFAULT_SECURITY_PROVIDER);
+
+    // Please Note: To make it easy for our customers we will attempt to read
+    // HDDS metadata dir and if that is not set, we will use Ozone directory.
+    // TODO: We might want to fix this later.
+    this.metadatDir = this.configuration.get(HDDS_METADATA_DIR_NAME,
+        configuration.get(OZONE_METADATA_DIRS));
+
+    Preconditions.checkNotNull(this.metadatDir, "Metadata directory can't be"
+        + " null. Please check configs.");
+    this.keyDir = this.configuration.get(HDDS_KEY_DIR_NAME,
+        HDDS_KEY_DIR_NAME_DEFAULT);
+    this.privateKeyName = this.configuration.get(HDDS_PRIVATE_KEY_FILE_NAME,
+        HDDS_PRIVATE_KEY_FILE_NAME_DEFAULT);
+    this.publicKeyName =  this.configuration.get(HDDS_PUBLIC_KEY_FILE_NAME,
+        HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT);
+
+    // First Startup -- if the provider is null, check for the provider.
+    if (SecurityConfig.provider == null) {
+      synchronized (SecurityConfig.class) {
+        provider = Security.getProvider(this.providerString);
+        if (SecurityConfig.provider == null) {
+          // Provider not found, let us try to Dynamically initialize the
+          // provider.
+          provider = initSecurityProvider(this.providerString);
+        }
+      }
+    }
+  }
+
+  /**
+   * Returns the Provider name.
+   * @return String Provider name.
+   */
+  public String getProviderString() {
+    return providerString;
+  }
+
+  /**
+   * Returns the public key file name.
+   * @return String, File name used for public keys.
+   */
+  public String getPublicKeyName() {
+    return publicKeyName;
+  }
+
+  /**
+   * Returns the private key file name.
+   * @return String, File name used for private keys.
+   */
+  public String getPrivateKeyName() {
+    return privateKeyName;
+  }
+
+  /**
+   * Returns the File path to where keys are stored.
+   * @return  String Key location.
+   */
+  public Path getKeyLocation() {
+    return Paths.get(metadatDir, keyDir);
+  }
+
+  /**
+   * Gets the Key Size.
+   *
+   * @return key size.
+   */
+  public int getSize() {
+    return size;
+  }
+
+  /**
+   * Gets provider.
+   *
+   * @return String Provider name.
+   */
+  public String getProvider() {
+    return providerString;
+  }
+
+  /**
+   * Returns the Key generation Algorithm used.
+   *
+   * @return String Algo.
+   */
+  public String getAlgo() {
+    return algo;
+  }
+
+  /**
+   * Returns the Configuration used for initializing this SecurityConfig.
+   * @return  Configuration
+   */
+  public Configuration getConfiguration() {
+    return configuration;
+  }
+
+
+  /**
+   * Adds a security provider dynamically if it is not loaded already.
+   *
+   * @param providerName - name of the provider.
+   */
+  private Provider initSecurityProvider(String providerName) {
+    switch (providerName) {
+    case "BC":
+      Security.addProvider(new BouncyCastleProvider());
+      return Security.getProvider(providerName);
+    default:
+      LOG.error("Security Provider:{} is unknown", providerName);
+      throw new SecurityException("Unknown security provider:" + providerName);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
new file mode 100644
index 0000000..89d5d51
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+/**
+ * This package contains common routines used in creating an x509 based
+ * identity framework for HDDS.
+ */
+package org.apache.hadoop.hdds.security.x509;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/main/resources/ozone-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml
index d450b37..dd0aca3 100644
--- a/hadoop-hdds/common/src/main/resources/ozone-default.xml
+++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml
@@ -1291,4 +1291,44 @@
       OzoneManager http server kerberos keytab.
     </description>
   </property>
+  <property>
+    <name>hdds.key.len</name>
+    <value>2048</value>
+    <tag>SCM, HDDS, X509, SECURITY</tag>
+    <description>
+      SCM CA key length.  This is an algorithm-specific metric, such as modulus length, specified in number of bits.
+    </description>
+  </property>
+  <property>
+    <name>hdds.key.dir.name</name>
+    <value>keys</value>
+    <tag>SCM, HDDS, X509, SECURITY</tag>
+    <description>
+      Directory to store public/private key for SCM CA. This is relative to ozone/hdds metadata dir.
+    </description>
+  </property>
+  <property>
+    <name>hdds.metadata.dir</name>
+    <value/>
+    <tag>X509, SECURITY</tag>
+    <description>
+      Absolute path to HDDS metadata dir.
+    </description>
+  </property>
+  <property>
+    <name>hdds.priv.key.file.name</name>
+    <value>private.pem</value>
+    <tag>X509, SECURITY</tag>
+    <description>
+      Name of file which stores private key generated for SCM CA.
+    </description>
+  </property>
+  <property>
+    <name>hdds.public.key.file.name</name>
+    <value>public.pem</value>
+    <tag>X509, SECURITY</tag>
+    <description>
+      Name of file which stores public key generated for SCM CA.
+    </description>
+  </property>
 </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyGenerator.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyGenerator.java
new file mode 100644
index 0000000..2ddf59c
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyGenerator.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509;
+
+import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_METADATA_DIRS;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.PublicKey;
+import java.security.interfaces.RSAPublicKey;
+import java.security.spec.PKCS8EncodedKeySpec;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test class for HDDS Key Generator.
+ */
+public class TestHDDSKeyGenerator {
+  private static SecurityConfig config;
+
+  @Before
+  public void init() {
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(OZONE_METADATA_DIRS,  GenericTestUtils.getTempPath("testpath"));
+    config = new SecurityConfig(conf);
+  }
+  /**
+   * In this test we verify that we are able to create a key pair, then get
+   * bytes of that and use ASN1. parser to parse it back to a private key.
+   * @throws NoSuchProviderException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testGenerateKey()
+      throws NoSuchProviderException, NoSuchAlgorithmException {
+    HDDSKeyGenerator keyGen = new HDDSKeyGenerator(config.getConfiguration());
+    KeyPair keyPair = keyGen.generateKey();
+    Assert.assertEquals(config.getAlgo(), keyPair.getPrivate().getAlgorithm());
+    PKCS8EncodedKeySpec keySpec =
+        new PKCS8EncodedKeySpec(keyPair.getPrivate().getEncoded());
+    Assert.assertEquals("PKCS#8", keySpec.getFormat());
+  }
+
+  /**
+   * In this test we assert that size that we specified is used for Key
+   * generation.
+   * @throws NoSuchProviderException
+   * @throws NoSuchAlgorithmException
+   */
+  @Test
+  public void testGenerateKeyWithSize() throws NoSuchProviderException,
+      NoSuchAlgorithmException {
+    HDDSKeyGenerator keyGen = new HDDSKeyGenerator(config.getConfiguration());
+    KeyPair keyPair = keyGen.generateKey(4096);
+    PublicKey publicKey = keyPair.getPublic();
+    if(publicKey instanceof RSAPublicKey) {
+      Assert.assertEquals(4096,
+          ((RSAPublicKey)(publicKey)).getModulus().bitLength());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyPEMWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyPEMWriter.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyPEMWriter.java
new file mode 100644
index 0000000..68ff9e6
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/TestHDDSKeyPEMWriter.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.security.KeyFactory;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Set;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.test.LambdaTestUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Test class for HDDS pem writer.
+ */
+public class TestHDDSKeyPEMWriter {
+
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+  private OzoneConfiguration configuration;
+  private HDDSKeyGenerator keyGenerator;
+  private String prefix;
+
+  @Before
+  public void init() throws IOException {
+    configuration = new OzoneConfiguration();
+    prefix = temporaryFolder.newFolder().toString();
+    configuration.set(HDDS_METADATA_DIR_NAME, prefix);
+    keyGenerator = new HDDSKeyGenerator(configuration);
+  }
+
+  /**
+   * Assert basic things like we are able to create a file, and the names are
+   * in expected format etc.
+   *
+   * @throws NoSuchProviderException if the configured security provider is unavailable
+   * @throws NoSuchAlgorithmException if the configured key algorithm is unavailable
+   * @throws IOException if writing or reading the key files fails
+   */
+  @Test
+  public void testWriteKey()
+      throws NoSuchProviderException, NoSuchAlgorithmException,
+      IOException, InvalidKeySpecException {
+    KeyPair keys = keyGenerator.generateKey();
+    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
+    pemWriter.writeKey(keys);
+
+    // Assert that locations have been created.
+    Path keyLocation = pemWriter.getSecurityConfig().getKeyLocation();
+    Assert.assertTrue(keyLocation.toFile().exists());
+
+    // Assert that locations are created in the locations that we specified
+    // using the Config.
+    Assert.assertTrue(keyLocation.toString().startsWith(prefix));
+    Path privateKeyPath = Paths.get(keyLocation.toString(),
+        pemWriter.getSecurityConfig().getPrivateKeyName());
+    Assert.assertTrue(privateKeyPath.toFile().exists());
+    Path publicKeyPath = Paths.get(keyLocation.toString(),
+        pemWriter.getSecurityConfig().getPublicKeyName());
+    Assert.assertTrue(publicKeyPath.toFile().exists());
+
+    // Read the private key and test if the expected String in the PEM file
+    // format exists.
+    byte[] privateKey = Files.readAllBytes(privateKeyPath);
+    String privateKeydata = new String(privateKey, StandardCharsets.UTF_8);
+    Assert.assertTrue(privateKeydata.contains("PRIVATE KEY"));
+
+    // Read the public key and test if the expected String in the PEM file
+    // format exists.
+    byte[] publicKey = Files.readAllBytes(publicKeyPath);
+    String publicKeydata = new String(publicKey, StandardCharsets.UTF_8);
+    Assert.assertTrue(publicKeydata.contains("PUBLIC KEY"));
+
+    // Let us decode the PEM file and parse it back into binary.
+    KeyFactory kf = KeyFactory.getInstance(
+        pemWriter.getSecurityConfig().getAlgo());
+
+    // Strip the human-readable PEM BEGIN/END guard lines.
+    privateKeydata =
+        privateKeydata.replace("-----BEGIN PRIVATE KEY-----\n", "");
+    privateKeydata =
+        privateKeydata.replace("-----END PRIVATE KEY-----", "");
+
+    // Decode the base64 to binary format and then use an ASN.1 parser to
+    // parse the binary format.
+
+    byte[] keyBytes = Base64.decodeBase64(privateKeydata);
+    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
+    PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
+    Assert.assertNotNull("Private Key should not be null",
+        privateKeyDecoded);
+
+    // Let us decode the public key and verify that we can parse it back into
+    // binary.
+    publicKeydata =
+        publicKeydata.replace("-----BEGIN PUBLIC KEY-----\n", "");
+    publicKeydata =
+        publicKeydata.replace("-----END PUBLIC KEY-----", "");
+
+    keyBytes = Base64.decodeBase64(publicKeydata);
+    X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
+    PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
+    Assert.assertNotNull("Public Key should not be null",
+        publicKeyDecoded);
+
+    // Now let us assert the permissions on the Directories and files are as
+    // expected.
+    Set<PosixFilePermission> expectedSet = pemWriter.getPermissionSet();
+    Set<PosixFilePermission> currentSet =
+        Files.getPosixFilePermissions(privateKeyPath);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+
+    currentSet =
+        Files.getPosixFilePermissions(publicKeyPath);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+
+    currentSet =
+        Files.getPosixFilePermissions(keyLocation);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+  }
+
+  /**
+   * Assert key rewrite fails without force option.
+   *
+   * @throws IOException when rewriting an existing key without the overwrite flag
+   */
+  @Test
+  public void testReWriteKey()
+      throws Exception {
+    KeyPair kp = keyGenerator.generateKey();
+    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
+    SecurityConfig secConfig = pemWriter.getSecurityConfig();
+    pemWriter.writeKey(kp);
+
+    // Assert that rewriting of keys throws exception with valid messages.
+    LambdaTestUtils
+        .intercept(IOException.class, "Private Key file already exists.",
+            () -> pemWriter.writeKey(kp));
+    FileUtils.deleteQuietly(Paths.get(
+        secConfig.getKeyLocation().toString() + "/" + secConfig
+            .getPrivateKeyName()).toFile());
+    LambdaTestUtils
+        .intercept(IOException.class, "Public Key file already exists.",
+            () -> pemWriter.writeKey(kp));
+    FileUtils.deleteQuietly(Paths.get(
+        secConfig.getKeyLocation().toString() + "/" + secConfig
+            .getPublicKeyName()).toFile());
+
+    // Should succeed now as both public and private key are deleted.
+    pemWriter.writeKey(kp);
+    // Should succeed with overwrite flag as true.
+    pemWriter.writeKey(kp, true);
+
+  }
+
+  /**
+   * Assert key write fails on a non-POSIX file system.
+   *
+   * @throws IOException when writing a key on an unsupported file system
+   */
+  @Test
+  public void testWriteKeyInNonPosixFS()
+      throws Exception {
+    KeyPair kp = keyGenerator.generateKey();
+    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
+    pemWriter.setIsPosixFileSystem(() -> false);
+
+    // Assert that key write fails on a non-POSIX file system.
+    LambdaTestUtils
+        .intercept(IOException.class, "Unsupported File System for pem file.",
+            () -> pemWriter.writeKey(kp));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f8f02f28/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
index 909cddf..8cc9c57 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java
@@ -36,5 +36,11 @@ public class TestOzoneConfigurationFields extends TestConfigurationFieldsBase {
     errorIfMissingConfigProps = true;
     errorIfMissingXmlProps = true;
     xmlPropsToSkipCompare.add("hadoop.tags.custom");
+    addPropertiesNotInXml();
+  }
+
+  private void addPropertiesNotInXml() { // config keys with no ozone-default.xml entry
+    configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_KEY_ALGORITHM);
+    configurationPropsToSkipCompare.add(HddsConfigKeys.HDDS_SECURITY_PROVIDER);
   }
 }
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org