Posted to common-commits@hadoop.apache.org by ae...@apache.org on 2016/06/16 18:33:10 UTC

[46/49] hadoop git commit: HADOOP-12875. [Azure Data Lake] Support for contract test and unit test cases. Contributed by Vishwajeet Dusane.

HADOOP-12875. [Azure Data Lake] Support for contract test and unit test cases. Contributed by Vishwajeet Dusane.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c9e71382
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c9e71382
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c9e71382

Branch: refs/heads/HDFS-7240
Commit: c9e71382a58b6ffcb3fccb79d3c146877f1c8313
Parents: e14ee0d
Author: Chris Nauroth <cn...@apache.org>
Authored: Thu Jun 16 09:46:05 2016 -0700
Committer: Chris Nauroth <cn...@apache.org>
Committed: Thu Jun 16 09:46:05 2016 -0700

----------------------------------------------------------------------
 .../web/PrivateAzureDataLakeFileSystem.java     |   2 +-
 .../hadoop/fs/adl/TestADLResponseData.java      | 120 +++++++
 .../apache/hadoop/fs/adl/TestGetFileStatus.java |  65 ++++
 .../apache/hadoop/fs/adl/TestListStatus.java    |  95 ++++++
 .../hadoop/fs/adl/TestableAdlFileSystem.java    |  31 ++
 .../fs/adl/live/AdlStorageConfiguration.java    |  76 +++++
 .../hadoop/fs/adl/live/AdlStorageContract.java  |  65 ++++
 .../fs/adl/live/TestAdlContractAppendLive.java  |  53 +++
 .../fs/adl/live/TestAdlContractConcatLive.java  |  52 +++
 .../fs/adl/live/TestAdlContractCreateLive.java  |  52 +++
 .../fs/adl/live/TestAdlContractDeleteLive.java  |  44 +++
 .../fs/adl/live/TestAdlContractMkdirLive.java   |  55 +++
 .../fs/adl/live/TestAdlContractOpenLive.java    |  44 +++
 .../fs/adl/live/TestAdlContractRenameLive.java  |  63 ++++
 .../fs/adl/live/TestAdlContractRootDirLive.java |  52 +++
 .../fs/adl/live/TestAdlContractSeekLive.java    |  44 +++
 .../live/TestAdlDifferentSizeWritesLive.java    | 102 ++++++
 .../adl/live/TestAdlFileSystemContractLive.java | 111 ++++++
 .../hadoop/fs/adl/live/TestAdlReadLive.java     | 342 +++++++++++++++++++
 ...estAdlWebHdfsFileContextCreateMkdirLive.java |  79 +++++
 ...AdlWebHdfsFileContextMainOperationsLive.java | 104 ++++++
 ...hedRefreshTokenBasedAccessTokenProvider.java |   8 +-
 .../hadoop/fs/common/AdlMockWebServer.java      | 116 +++++++
 .../hadoop/fs/common/ExpectedResponse.java      |  72 ++++
 .../hadoop/fs/common/TestDataForRead.java       | 120 +++++++
 .../org/apache/hadoop/hdfs/web/TestAdlRead.java | 205 +++++++++++
 .../web/TestConcurrentDataReadOperations.java   | 306 +++++++++++++++++
 .../hdfs/web/TestConfigurationSetting.java      | 112 ++++++
 .../hdfs/web/TestSplitSizeCalculation.java      | 123 +++++++
 .../src/test/resources/adls.xml                 | 139 ++++++++
 .../test/resources/contract-test-options.xml    |  57 ++++
 31 files changed, 2905 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
index 89011d2..c4a19d5 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
@@ -922,7 +922,7 @@ public class PrivateAzureDataLakeFileSystem extends SWebHdfsFileSystem {
         size = maxBufferSize;
       }
 
-      int equalBufferSplit = Math.max(Math.round(size / SIZE4MB), 1);
+      int equalBufferSplit = Math.max(size / SIZE4MB, 1);
       int splitSize = Math.min(equalBufferSplit, maxConcurrentConnection);
       return splitSize;
     }
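
The one-line fix above removes a redundant Math.round: size and SIZE4MB are (presumably) both ints, so size / SIZE4MB already truncates, and wrapping it in Math.round only forced an implicit widening to float. A minimal sketch of the resulting split calculation, with SIZE4MB inlined as the 4 MB value its name suggests (the real fields live in PrivateAzureDataLakeFileSystem and are exercised by TestSplitSizeCalculation, listed in the diffstat):

  // Sketch only: names and values are taken from the hunk above,
  // not a verbatim copy of the class.
  static int getSplitSize(int size, int maxConcurrentConnection) {
    final int SIZE4MB = 4 * 1024 * 1024;  // assumed from the constant's name
    // Integer division truncates, so Math.round adds nothing here.
    int equalBufferSplit = Math.max(size / SIZE4MB, 1);
    // Never split into more parts than connections are allowed.
    return Math.min(equalBufferSplit, maxConcurrentConnection);
  }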

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
new file mode 100644
index 0000000..1c4fcab
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestADLResponseData.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import org.apache.hadoop.fs.FileStatus;
+
+import java.util.Random;
+
+/**
+ * This class provides generic helper methods for mock tests to generate
+ * stub responses for network requests.
+ */
+public final class TestADLResponseData {
+
+  private TestADLResponseData() {}
+
+  public static String getGetFileStatusJSONResponse(FileStatus status) {
+    String str = "{\"FileStatus\":{\"length\":" + status.getLen() + "," +
+        "\"pathSuffix\":\"\",\"type\":\"" + (status.isDirectory() ?
+        "DIRECTORY" :
+        "FILE") + "\"" +
+        ",\"blockSize\":" + status.getBlockSize() + ",\"accessTime\":" +
+        status.getAccessTime() + ",\"modificationTime\":" + status
+        .getModificationTime() + "" +
+        ",\"replication\":" + status.getReplication() + ",\"permission\":\""
+        + status.getPermission() + "\",\"owner\":\"" + status.getOwner()
+        + "\",\"group\":\"" + status.getGroup() + "\"}}";
+
+    return str;
+  }
+
+  public static String getGetFileStatusJSONResponse() {
+    return getGetFileStatusJSONResponse(4194304);
+  }
+
+  public static String getGetFileStatusJSONResponse(long length) {
+    String str = "{\"FileStatus\":{\"length\":" + length + "," +
+        "\"pathSuffix\":\"\",\"type\":\"FILE\",\"blockSize\":268435456," +
+        "\"accessTime\":1452103827023,\"modificationTime\":1452103827023," +
+        "\"replication\":0,\"permission\":\"777\"," +
+        "\"owner\":\"NotSupportYet\",\"group\":\"NotSupportYet\"}}";
+    return str;
+  }
+
+  public static String getListFileStatusJSONResponse(int dirSize) {
+    String list = "";
+    for (int i = 0; i < dirSize; ++i) {
+      list += "{\"length\":1024,\"pathSuffix\":\"" + java.util.UUID.randomUUID()
+          + "\",\"type\":\"FILE\",\"blockSize\":268435456," +
+          "\"accessTime\":1452103878833," +
+          "\"modificationTime\":1452103879190,\"replication\":0," +
+          "\"permission\":\"777\",\"owner\":\"NotSupportYet\"," +
+          "\"group\":\"NotSupportYet\"},";
+    }
+
+    list = list.substring(0, list.length() - 1);
+    String str = "{\"FileStatuses\":{\"FileStatus\":[" + list + "]}}";
+
+    return str;
+  }
+
+  public static String getJSONResponse(boolean status) {
+    String str = "{\"boolean\":" + status + "}";
+    return str;
+  }
+
+  public static String getErrorIllegalArgumentExceptionJSONResponse() {
+    String str = "{\n" +
+        "  \"RemoteException\":\n" +
+        "  {\n" +
+        "    \"exception\"    : \"IllegalArgumentException\",\n" +
+        "    \"javaClassName\": \"java.lang.IllegalArgumentException\",\n" +
+        "    \"message\"      : \"Bad Offset 0x83090015\"" +
+        "  }\n" +
+        "}";
+
+    return str;
+  }
+
+  public static String getErrorInternalServerExceptionJSONResponse() {
+    String str = "{\n" +
+        "  \"RemoteException\":\n" +
+        "  {\n" +
+        "    \"exception\"    : \"RuntimeException\",\n" +
+        "    \"javaClassName\": \"java.lang.RuntimeException\",\n" +
+        "    \"message\"      : \"Internal Server Error\"" +
+        "  }\n" +
+        "}";
+
+    return str;
+  }
+
+  public static byte[] getRandomByteArrayData() {
+    return getRandomByteArrayData(4 * 1024 * 1024);
+  }
+
+  public static byte[] getRandomByteArrayData(int size) {
+    byte[] b = new byte[size];
+    Random rand = new Random();
+    rand.nextBytes(b);
+    return b;
+  }
+}
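
The stub bodies above feed the mock-server tests that follow: a test enqueues a canned JSON response and then issues an ordinary FileSystem call against the mocked endpoint, checking only the client-side parsing. A minimal usage sketch (getMockServer() and getMockAdlFileSystem() are the accessors provided by the AdlMockWebServer base class listed in the diffstat but not shown in this excerpt):

  // Enqueue a canned 200 response, then exercise the client-side parser.
  getMockServer().enqueue(new MockResponse().setResponseCode(200)
      .setBody(TestADLResponseData.getGetFileStatusJSONResponse(1024)));
  FileStatus status =
      getMockAdlFileSystem().getFileStatus(new Path("/any/path"));
  Assert.assertEquals(1024, status.getLen());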

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
new file mode 100644
index 0000000..4268b27
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestGetFileStatus.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.common.AdlMockWebServer;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Time;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * This class tests the local getFileStatus implementation to cover
+ * correct parsing of successful and error JSON responses
+ * from the server.
+ * The Adls GetFileStatus operation is covered in detail in the
+ * org.apache.hadoop.fs.adl.live test package.
+ */
+public class TestGetFileStatus extends AdlMockWebServer {
+
+  @Test
+  public void getFileStatusReturnsAsExpected()
+      throws URISyntaxException, IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200)
+        .setBody(TestADLResponseData.getGetFileStatusJSONResponse()));
+    long startTime = Time.monotonicNow();
+    FileStatus fileStatus = getMockAdlFileSystem().getFileStatus(
+        new Path("/test1/test2"));
+    long endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+    Assert.assertTrue(fileStatus.isFile());
+    Assert.assertEquals(fileStatus.getPath().toString(),
+        "adl://" + getMockServer().getHostName() + ":"
+            + getMockServer().getPort()
+            + "/test1/test2");
+    Assert.assertEquals(fileStatus.getLen(), 4194304);
+    Assert.assertEquals(fileStatus.getBlockSize(), 268435456);
+    Assert.assertEquals(fileStatus.getReplication(), 0);
+    Assert.assertEquals(fileStatus.getPermission(), new FsPermission("777"));
+    Assert.assertEquals(fileStatus.getOwner(), "NotSupportYet");
+    Assert.assertEquals(fileStatus.getGroup(), "NotSupportYet");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
new file mode 100644
index 0000000..82c2494
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl;
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.common.AdlMockWebServer;
+import org.apache.hadoop.util.Time;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+
+/**
+ * This class tests the local listStatus implementation to cover correct
+ * parsing of successful and error JSON responses from the server.
+ * The Adls ListStatus functionality is covered in detail in the
+ * org.apache.hadoop.fs.adl.live test package.
+ */
+public class TestListStatus extends AdlMockWebServer {
+
+  @Test
+  public void listStatusReturnsAsExpected() throws IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(200)
+        .setBody(TestADLResponseData.getListFileStatusJSONResponse(10)));
+    long startTime = Time.monotonicNow();
+    FileStatus[] ls = getMockAdlFileSystem().listStatus(
+        new Path("/test1/test2"));
+    long endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+    Assert.assertEquals(ls.length, 10);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200)
+        .setBody(TestADLResponseData.getListFileStatusJSONResponse(200)));
+    startTime = Time.monotonicNow();
+    ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
+    endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+    Assert.assertEquals(ls.length, 200);
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(200)
+        .setBody(TestADLResponseData.getListFileStatusJSONResponse(2048)));
+    startTime = Time.monotonicNow();
+    ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
+    endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+    Assert.assertEquals(ls.length, 2048);
+  }
+
+  @Test
+  public void listStatusonFailure() throws IOException {
+    getMockServer().enqueue(new MockResponse().setResponseCode(403).setBody(
+        TestADLResponseData.getErrorIllegalArgumentExceptionJSONResponse()));
+    FileStatus[] ls = null;
+    long startTime = Time.monotonicNow();
+    try {
+      ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("Bad Offset 0x83090015"));
+    }
+    long endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+
+    getMockServer().enqueue(new MockResponse().setResponseCode(500)
+        .setBody(
+            TestADLResponseData.getErrorInternalServerExceptionJSONResponse()));
+    startTime = Time.monotonicNow();
+    try {
+      ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("Internal Server Error"));
+    }
+    endTime = Time.monotonicNow();
+    System.out.println("Time : " + (endTime - startTime));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
new file mode 100644
index 0000000..1e03bbf
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestableAdlFileSystem.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl;
+
+/**
+ * This class overrides AdlFileSystem to change the transport scheme to http
+ * instead of https so that tests can run against the mock web server.
+ */
+public class TestableAdlFileSystem extends AdlFileSystem {
+  @Override
+  protected String getTransportScheme() {
+    return "http";
+  }
+}
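
TestableAdlFileSystem only flips the transport scheme; the wiring to the mock endpoint happens in the AdlMockWebServer base class (listed in the diffstat, not shown here). A rough, hypothetical sketch of that wiring, assuming the base class simply initializes the testable filesystem against the mock server's host and port (credential/OAuth configuration that the real setup would supply is omitted):

  // Hypothetical setup approximating AdlMockWebServer; not verbatim.
  MockWebServer server = new MockWebServer();
  server.start();
  Configuration conf = new Configuration();
  TestableAdlFileSystem fs = new TestableAdlFileSystem();
  // adl:// URI pointing at the local http mock server, matching the
  // path assertions made in TestGetFileStatus above.
  fs.initialize(new URI("adl://" + server.getHostName() + ":"
      + server.getPort()), conf);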

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
new file mode 100644
index 0000000..d257768
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageConfiguration.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.adl.AdlFileSystem;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Utility class to configure real Adls storage for the live test suite.
+ */
+public final class AdlStorageConfiguration {
+  private AdlStorageConfiguration() {}
+
+  private static final String CONTRACT_ENABLE_KEY =
+      "dfs.adl.test.contract" + ".enable";
+  private static final String TEST_CONFIGURATION_FILE_NAME =
+      "contract-test-options.xml";
+  private static final String TEST_SUPPORTED_TEST_CONFIGURATION_FILE_NAME =
+      "adls.xml";
+
+  private static boolean isContractTestEnabled = false;
+  private static Configuration conf = null;
+
+  public static Configuration getConfiguration() {
+    Configuration localConf = new Configuration();
+    localConf.addResource(TEST_CONFIGURATION_FILE_NAME);
+    localConf.addResource(TEST_SUPPORTED_TEST_CONFIGURATION_FILE_NAME);
+    return localConf;
+  }
+
+  public static boolean isContractTestEnabled() {
+    if (conf == null) {
+      conf = getConfiguration();
+    }
+
+    isContractTestEnabled = conf.getBoolean(CONTRACT_ENABLE_KEY, false);
+    return isContractTestEnabled;
+  }
+
+  public static FileSystem createAdlStorageConnector()
+      throws URISyntaxException, IOException {
+    if (conf == null) {
+      conf = getConfiguration();
+    }
+
+    if (!isContractTestEnabled()) {
+      return null;
+    }
+
+    AdlFileSystem fileSystem = new AdlFileSystem();
+    fileSystem.initialize(new URI(conf.get("fs.defaultFS")), conf);
+    return fileSystem;
+  }
+}
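
Every live test below gates on the switch read here. A short sketch of how a caller checks the gate and obtains the connector (dfs.adl.test.contract.enable, contract-test-options.xml, adls.xml and fs.defaultFS are exactly the keys and resources referenced above):

  // Guarded live-test setup, mirroring what the contract tests below do.
  org.junit.Assume.assumeTrue(
      AdlStorageConfiguration.isContractTestEnabled());
  // Returns null unless dfs.adl.test.contract.enable is true in
  // contract-test-options.xml and fs.defaultFS names the Adls store.
  FileSystem fs = AdlStorageConfiguration.createAdlStorageConnector();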

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
new file mode 100644
index 0000000..1e40199
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/AdlStorageContract.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+/**
+ * Extension of AbstractFSContract representing a filesystem contract that
+ * an Adls filesystem implementation is expected to implement.
+ */
+public class AdlStorageContract extends AbstractFSContract {
+  private FileSystem fs;
+
+  protected AdlStorageContract(Configuration conf) {
+    super(conf);
+    try {
+      fs = AdlStorageConfiguration.createAdlStorageConnector();
+    } catch (URISyntaxException e) {
+      throw new IllegalStateException("Can not initialize ADL FileSystem. "
+          + "Please check fs.defaultFS property.", e);
+    } catch (IOException e) {
+      throw new IllegalStateException("Can not initialize ADL FileSystem.", e);
+    }
+    this.setConf(AdlStorageConfiguration.getConfiguration());
+  }
+
+  @Override
+  public String getScheme() {
+    return "adl";
+  }
+
+  @Override
+  public FileSystem getTestFileSystem() throws IOException {
+    return this.fs;
+  }
+
+  @Override
+  public Path getTestPath() {
+    Path path = new Path("/test");
+    return path;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
new file mode 100644
index 0000000..83390af
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractAppendTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls APPEND semantics compliance with Hadoop.
+ */
+public class TestAdlContractAppendLive extends AbstractContractAppendTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Override
+  @Test
+  public void testRenameFileBeingAppended() throws Throwable {
+    ContractTestUtils.unsupported("Skipping since renaming a file in append "
+        + "mode is not supported in Adl");
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
new file mode 100644
index 0000000..8474e9c
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractConcatTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls CONCAT semantics compliance with Hadoop.
+ */
+public class TestAdlContractConcatLive extends AbstractContractConcatTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+
+  @Test
+  public void testConcatMissingTarget() throws Throwable {
+    ContractTestUtils.unsupported("BUG : Adl to support expectation from "
+        + "concat on missing targets.");
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
new file mode 100644
index 0000000..907c50c
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls CREATE semantics compliance with Hadoop.
+ */
+public class TestAdlContractCreateLive extends AbstractContractCreateTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+
+  @Test
+  public void testOverwriteEmptyDirectory() throws Throwable {
+    ContractTestUtils
+        .unsupported("BUG : Adl to support overwrite of " + "empty directory.");
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
new file mode 100644
index 0000000..30eaec7
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.Before;
+
+/**
+ * Verify Adls DELETE semantics compliance with Hadoop.
+ */
+public class TestAdlContractDeleteLive extends AbstractContractDeleteTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
new file mode 100644
index 0000000..e498110
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls MKDIR semantics compliance with Hadoop.
+ */
+public class TestAdlContractMkdirLive extends AbstractContractMkdirTest {
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new AdlStorageContract(conf);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+
+  @Test
+  public void testMkdirOverParentFile() throws Throwable {
+    ContractTestUtils.unsupported("Not supported by Adl");
+  }
+
+  @Test
+  public void testNoMkdirOverFile() throws Throwable {
+    ContractTestUtils.unsupported("Not supported by Adl");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
new file mode 100644
index 0000000..2bb2095
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.Before;
+
+/**
+ * Verify Adls OPEN/READ semantics compliance with Hadoop.
+ */
+public class TestAdlContractOpenLive extends AbstractContractOpenTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
new file mode 100644
index 0000000..06063c5
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls RENAME semantics compliance with Hadoop.
+ */
+public class TestAdlContractRenameLive extends AbstractContractRenameTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+
+  @Test
+  public void testRenameFileOverExistingFile() throws Throwable {
+    ContractTestUtils
+        .unsupported("BUG : Adl to support full POSIX " + "behaviour");
+  }
+
+  @Test
+  public void testRenameFileNonexistentDir() throws Throwable {
+    ContractTestUtils
+        .unsupported("BUG : Adl to support rename when the " + "destination dir does not exist");
+  }
+
+  @Test
+  public void testRenameWithNonEmptySubDir() throws Throwable {
+    ContractTestUtils.unsupported("BUG : Adl to support non empty dir move.");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
new file mode 100644
index 0000000..bf4e549
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verify Adls root level operation support.
+ */
+public class TestAdlContractRootDirLive
+    extends AbstractContractRootDirectoryTest {
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+
+  @Test
+  public void testRmNonEmptyRootDirNonRecursive() throws Throwable {
+    ContractTestUtils.unsupported(
+        "BUG : Adl should throw exception instead " + "of returning false.");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
new file mode 100644
index 0000000..0976464
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.junit.Before;
+
+/**
+ * Verify Adls OPEN/READ seek operation support.
+ */
+public class TestAdlContractSeekLive extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration configuration) {
+    return new AdlStorageContract(configuration);
+  }
+
+  @Before
+  @Override
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+    super.setup();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
new file mode 100644
index 0000000..b712995
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.Random;
+
+/**
+ * Verify that writes with different data segment sizes preserve the
+ * integrity and order of the data.
+ */
+public class TestAdlDifferentSizeWritesLive {
+
+  public static byte[] getRandomByteArrayData(int size) {
+    byte[] b = new byte[size];
+    Random rand = new Random();
+    rand.nextBytes(b);
+    return b;
+  }
+
+  @Before
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+  }
+
+  @Test
+  public void testSmallDataWrites() throws IOException {
+    testDataIntegrity(4 * 1024 * 1024, 1 * 1024);
+    testDataIntegrity(4 * 1024 * 1024, 7 * 1024);
+    testDataIntegrity(4 * 1024 * 1024, 10);
+    testDataIntegrity(2 * 1024 * 1024, 10);
+    testDataIntegrity(1 * 1024 * 1024, 10);
+    testDataIntegrity(100, 1);
+  }
+
+  @Test
+  public void testMediumDataWrites() throws IOException {
+    testDataIntegrity(4 * 1024 * 1024, 1 * 1024 * 1024);
+    testDataIntegrity(7 * 1024 * 1024, 2 * 1024 * 1024);
+    testDataIntegrity(9 * 1024 * 1024, 2 * 1024 * 1024);
+    testDataIntegrity(10 * 1024 * 1024, 3 * 1024 * 1024);
+  }
+
+  private void testDataIntegrity(int totalSize, int chunkSize)
+      throws IOException {
+    Path path = new Path("/test/dataIntegrityCheck");
+    FileSystem fs = null;
+    try {
+      fs = AdlStorageConfiguration.createAdlStorageConnector();
+    } catch (URISyntaxException e) {
+      throw new IllegalStateException("Can not initialize ADL FileSystem. "
+          + "Please check fs.defaultFS property.", e);
+    }
+    byte[] expectedData = getRandomByteArrayData(totalSize);
+
+    FSDataOutputStream out = fs.create(path, true);
+    int iteration = totalSize / chunkSize;
+    int reminderIteration = totalSize % chunkSize;
+    int offset = 0;
+    for (int i = 0; i < iteration; ++i) {
+      out.write(expectedData, offset, chunkSize);
+      offset += chunkSize;
+    }
+
+    out.write(expectedData, offset, reminderIteration);
+    out.close();
+
+    byte[] actualData = new byte[totalSize];
+    FSDataInputStream in = fs.open(path);
+    in.readFully(0, actualData);
+    in.close();
+    Assert.assertArrayEquals(expectedData, actualData);
+    Assert.assertTrue(fs.delete(path, true));
+  }
+}
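
testDataIntegrity writes the buffer as a series of whole chunks followed by one final partial write; the variable named reminderIteration actually holds that remainder in bytes, not an iteration count. The bookkeeping, spelled out:

  // For totalSize bytes written in chunkSize pieces:
  int fullChunks = totalSize / chunkSize;   // number of complete writes
  int remainder  = totalSize % chunkSize;   // bytes left for the final write
  // fullChunks * chunkSize + remainder == totalSize, so the trailing
  // out.write(expectedData, offset, remainder) completes the buffer.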

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
new file mode 100644
index 0000000..e007c4e
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+import org.junit.Test;
+
+import java.io.IOException;
+
+/**
+ * Verify that Adls adheres to Hadoop file system semantics.
+ */
+public class TestAdlFileSystemContractLive extends FileSystemContractBaseTest {
+  private FileSystem adlStore;
+
+  @Override
+  protected void setUp() throws Exception {
+    adlStore = AdlStorageConfiguration.createAdlStorageConnector();
+    if (AdlStorageConfiguration.isContractTestEnabled()) {
+      fs = adlStore;
+    }
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    if (AdlStorageConfiguration.isContractTestEnabled()) {
+      cleanup();
+      adlStore = null;
+      fs = null;
+    }
+  }
+
+  private void cleanup() throws IOException {
+    adlStore.delete(new Path("/test"), true);
+  }
+
+  @Override
+  protected void runTest() throws Throwable {
+    if (AdlStorageConfiguration.isContractTestEnabled()) {
+      super.runTest();
+    }
+  }
+
+  public void testGetFileStatus() throws IOException {
+    if (!AdlStorageConfiguration.isContractTestEnabled()) {
+      return;
+    }
+
+    Path testPath = new Path("/test/adltest");
+    if (adlStore.exists(testPath)) {
+      adlStore.delete(testPath, false);
+    }
+
+    adlStore.create(testPath).close();
+    assertTrue(adlStore.delete(testPath, false));
+  }
+
+  /**
+   * The following tests are failing on Azure Data Lake and the Azure Data Lake
+   * file system code needs to be modified to make them pass.
+   * A separate work item has been opened for this.
+   */
+  @Test
+  @Override
+  public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
+    // BUG : Adl should return exception instead of false.
+  }
+
+  @Test
+  @Override
+  public void testMkdirsWithUmask() throws Exception {
+    // Support under implementation in Adl
+  }
+
+  @Test
+  @Override
+  public void testMoveFileUnderParent() throws Exception {
+    // BUG: Adl server should return expected status code.
+  }
+
+  @Test
+  @Override
+  public void testRenameFileToSelf() throws Exception {
+    // BUG: Adl server should return expected status code.
+  }
+
+  @Test
+  @Override
+  public void testRenameToDirWithSamePrefixAllowed() throws Exception {
+    // BUG: Adl server should return expected status code.
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlReadLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlReadLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlReadLive.java
new file mode 100644
index 0000000..f1d909c
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlReadLive.java
@@ -0,0 +1,342 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.Random;
+import java.util.UUID;
+
+/**
+ * Verify that reads of different data segment sizes from the file preserve
+ * the integrity and order of the data over the
+ * BufferManger and BatchByteArrayInputStream implementations.
+ */
+public class TestAdlReadLive {
+  private String expectedData = "1234567890abcdefghijklmnopqrstuvwxyz";
+
+  @Before
+  public void setup() throws Exception {
+    org.junit.Assume
+        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+  }
+
+  private FileSystem getFileSystem() throws IOException, URISyntaxException {
+    return AdlStorageConfiguration.createAdlStorageConnector();
+  }
+
+  private void setupFile(Path path) throws IOException, URISyntaxException {
+    setupFile(path, expectedData);
+  }
+
+  private void setupFile(Path path, String data)
+      throws IOException, URISyntaxException {
+    expectedData = data;
+    FileSystem fs = getFileSystem();
+    fs.delete(path, true);
+    FSDataOutputStream fdis = fs.create(path);
+    fdis.writeBytes(expectedData);
+    fdis.close();
+    fs.listStatus(path.getParent());
+    long actualLen = fs.getFileStatus(path).getLen();
+    long expectedLen = expectedData.length();
+    System.out.println(
+        " Length of file : " + actualLen + " " + fs.getUri());
+    Assert.assertEquals(expectedLen, actualLen);
+  }
+
+  @Test
+  public void
+      testOpenReadMoreThanAvailableBufferCrashFixIndexOutOfBoundsException()
+      throws Throwable {
+    Path path = new Path("/test1");
+    FileSystem fs = getFileSystem();
+    setupFile(path);
+
+    if (fs.exists(path)) {
+      Assert.assertTrue(fs.delete(path, true));
+    }
+
+    FSDataOutputStream outputStream = fs.create(path);
+    final byte[] data = new byte[24 * 1024 * 1024];
+    Random ran = new Random();
+    ran.nextBytes(data);
+    outputStream.write(data);
+    // Close the stream so the full payload is flushed before it is read back.
+    outputStream.close();
+
+    FSDataInputStream bb = fs.open(path);
+    byte[] expected = new byte[4 * 1024 * 1024];
+    bb.read();
+    bb.readFully(16711581, expected, 33,
+        65640); // Regression: used to throw IndexOutOfBoundsException
+    bb.seek(16711581);
+    bb.readFully(16711576, expected, 33, 65640);
+    bb.readFully(16711578, expected, 33, 65640);
+    bb.readFully(16711580, expected, 33, 65640);
+    bb.readFully(16711576, expected, 0, expected.length);
+    bb.seek(0);
+    expected = new byte[134144];
+    while (bb.read() != -1) {
+      continue;
+    }
+    bb.readFully(0, data, 0, data.length);
+  }
+
+  @Test
+  public void readNullData() throws IOException, URISyntaxException {
+    String data = "SPL   \u0001Lorg.apache.hadoop.examples.terasort"
+        + ".TeraGen$RangeInputFormat$RangeInputSplit \u008DLK@Lorg.apache"
+        + ".hadoop.examples.terasort"
+        + ".TeraGen$RangeInputFormat$RangeInputSplit\u008DLK@\u008DLK@";
+    Path path = new Path("/test4");
+    FileSystem fs = this.getFileSystem();
+    setupFile(path, data);
+    FSDataInputStream bb = fs.open(path);
+    int i = 0;
+    String actualData = new String();
+    System.out.println("Data Length :" + expectedData.length());
+    byte[] arr = new byte[data.length()];
+    bb.readFully(0, arr);
+    actualData = new String(arr);
+    System.out.println(" Data : " + actualData);
+    Assert.assertEquals(actualData.length(), expectedData.length());
+
+    arr = new byte[data.length() - 7];
+    bb.readFully(7, arr);
+    actualData = new String(arr);
+    Assert.assertEquals(actualData.length(), expectedData.length() - 7);
+    bb.close();
+  }
+
+  @Test
+  public void readTest() throws IOException, URISyntaxException {
+    Path path = new Path("/test4");
+    FileSystem fs = this.getFileSystem();
+    setupFile(path);
+    FSDataInputStream bb = fs.open(path);
+    int i = 0;
+    String actualData = new String();
+    while (true) {
+      int c = bb.read();
+      if (c < 0) {
+        break;
+      }
+      actualData += (char) c;
+    }
+
+    byte[] b = new byte[100];
+    System.out.println(bb.read(b, 9, 91));
+    System.out.println(bb.read());
+    System.out.println(bb.read());
+    System.out.println(bb.read());
+    System.out.println(bb.read());
+    System.out.println(bb.read());
+    System.out.println(bb.read());
+
+    bb.close();
+    Assert.assertEquals(actualData, expectedData);
+
+    for (int j = 0; j < 100; ++j) {
+      fs = this.getFileSystem();
+      fs.exists(new Path("/test" + j));
+    }
+  }
+
+  @Test
+  public void readByteTest() throws IOException, URISyntaxException {
+    Path path = new Path("/test3");
+    FileSystem fs = this.getFileSystem();
+    setupFile(path);
+    FSDataInputStream bb = fs.open(path);
+    int i = 0;
+    byte[] data = new byte[expectedData.length()];
+    int readByte = bb.read(data);
+    bb.close();
+    Assert.assertEquals(readByte, expectedData.length());
+    Assert.assertEquals(new String(data), expectedData);
+  }
+
+  @Test
+  public void readByteFullyTest() throws IOException, URISyntaxException {
+    Path path = new Path("/test2");
+    FileSystem fs = this.getFileSystem();
+    setupFile(path);
+    FSDataInputStream bb = fs.open(path);
+    int i = 0;
+    byte[] data = new byte[expectedData.length()];
+    bb.readFully(data);
+    bb.close();
+    Assert.assertEquals(new String(data), expectedData);
+
+    bb = fs.open(path);
+    bb.readFully(data, 0, data.length);
+    bb.close();
+    Assert.assertEquals(new String(data), expectedData);
+  }
+
+  @Test
+  public void readCombinationTest() throws IOException, URISyntaxException {
+    Path path = new Path("/test1");
+    FileSystem fs = this.getFileSystem();
+    setupFile(path);
+    FSDataInputStream bb = fs.open(path);
+    int i = 0;
+    byte[] data = new byte[5];
+    int readByte = bb.read(data);
+    Assert.assertEquals(new String(data), expectedData.substring(0, 5));
+
+    bb.readFully(data, 0, data.length);
+    Assert.assertEquals(new String(data), expectedData.substring(5, 10));
+    bb.close();
+    bb = fs.open(path);
+    bb.readFully(5, data, 0, data.length);
+    Assert.assertEquals(new String(data), expectedData.substring(5, 10));
+
+    bb.read(data);
+    Assert.assertEquals(new String(data), expectedData.substring(0, 5));
+    bb.close();
+    bb = fs.open(path);
+    bb.read(new byte[100]);
+    bb.close();
+  }
+
+  @Test
+  public void readMultiSeekTest() throws IOException, URISyntaxException {
+    final Path path = new Path(
+        "/delete14/" + UUID.randomUUID().toString().replaceAll("-", ""));
+    FileSystem fs = this.getFileSystem();
+
+    final byte[] actualData = new byte[3267397];
+    Random ran = new Random();
+    ran.nextBytes(actualData);
+    byte[] testData = null;
+
+    fs.delete(path, true);
+    FSDataOutputStream os = fs.create(path);
+    os.write(actualData);
+    os.close();
+
+    FSDataInputStream bb = fs.open(path);
+    byte[] data = new byte[16384];
+    bb.readFully(3251013, data, 0, 16384);
+    testData = new byte[16384];
+    System.arraycopy(actualData, 3251013, testData, 0, 16384);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[1921];
+    bb.readFully(3265476, data, 0, 1921);
+    testData = new byte[1921];
+    System.arraycopy(actualData, 3265476, testData, 0, 1921);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[3267394];
+    bb.readFully(3, data, 0, 3267394);
+    testData = new byte[3267394];
+    System.arraycopy(actualData, 3, testData, 0, 3267394);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[3266943];
+    bb.readFully(454, data, 0, 3266943);
+    testData = new byte[3266943];
+    System.arraycopy(actualData, 454, testData, 0, 3266943);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[3265320];
+    bb.readFully(2077, data, 0, 3265320);
+    testData = new byte[3265320];
+    System.arraycopy(actualData, 2077, testData, 0, 3265320);
+    Assert.assertArrayEquals(testData, data);
+
+    bb.close();
+
+    bb = fs.open(path);
+
+    data = new byte[3263262];
+    bb.readFully(4135, data, 0, 3263262);
+    testData = new byte[3263262];
+    System.arraycopy(actualData, 4135, testData, 0, 3263262);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[2992591];
+    bb.readFully(274806, data, 0, 2992591);
+    testData = new byte[2992591];
+    System.arraycopy(actualData, 274806, testData, 0, 2992591);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[1985665];
+    bb.readFully(1281732, data, 0, 1985665);
+    testData = new byte[1985665];
+    System.arraycopy(actualData, 1281732, testData, 0, 1985665);
+    Assert.assertArrayEquals(testData, data);
+
+    data = new byte[3267394];
+    try {
+      bb.readFully(2420207, data, 0, 3267394);
+      Assert.fail("EOF expected");
+    } catch (IOException e) {
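+      // Expected: the requested range extends past the end of the file.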
+    }
+
+    bb.close();
+  }
+
+  @Test
+  public void allASCIICharTest() throws IOException, URISyntaxException {
+    final Path path = new Path(
+        "/delete14/" + UUID.randomUUID().toString().replaceAll("-", ""));
+    FileSystem fs = this.getFileSystem();
+    final byte[] actualData = new byte[127];
+    for (byte i = 0; i < 127; ++i) {
+      actualData[i] = i;
+    }
+
+    fs.delete(path, true);
+    FSDataOutputStream os = fs.create(path);
+    os.write(actualData);
+    os.close();
+
+    FSDataInputStream bb = fs.open(path);
+    byte[] data = new byte[127];
+
+    bb.readFully(0, data, 0, data.length);
+    bb.close();
+    Assert.assertArrayEquals(data, actualData);
+
+    bb = fs.open(path);
+    int byteRead = 1;
+    while (bb.read() != -1) {
+      byteRead++;
+    }
+
+    bb.seek(0);
+    byteRead = 1;
+    while (bb.read() != -1) {
+      byteRead++;
+    }
+    bb.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextCreateMkdirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextCreateMkdirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextCreateMkdirLive.java
new file mode 100644
index 0000000..fc50c04
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextCreateMkdirLive.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DelegateToFileSystem;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextCreateMkdirBaseTest;
+import org.apache.hadoop.fs.FileContextTestHelper;
+import org.apache.hadoop.fs.FileSystem;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+
+import java.io.File;
+import java.net.URI;
+
+/**
+ * Verify that the ADLS file system adheres to the Hadoop file system contract
+ * using the tests available in FileContextCreateMkdirBaseTest.
+ */
+public class TestAdlWebHdfsFileContextCreateMkdirLive
+    extends FileContextCreateMkdirBaseTest {
+  private static final String KEY_FILE_SYSTEM = "fs.defaultFS";
+
+  @Override
+  public void setUp() throws Exception {
+    Configuration conf = AdlStorageConfiguration.getConfiguration();
+    String fileSystem = conf.get(KEY_FILE_SYSTEM);
+    if (fileSystem == null || fileSystem.trim().length() == 0) {
+      throw new Exception("Default file system not configured.");
+    }
+    URI uri = new URI(fileSystem);
+    FileSystem fs = AdlStorageConfiguration.createAdlStorageConnector();
+    fc = FileContext.getFileContext(
+        new DelegateToFileSystem(uri, fs, conf, fs.getScheme(), false) {
+        }, conf);
+    super.setUp();
+  }
+
+  /**
+   * Required to override since getRandmizedTestDir on Windows generates an
+   * absolute local file path that contains the ":" character.
+   * An example of a generated path is "adl://<FileSystem Path>/d:/a/b/c".
+   *
+   * ADLS does not support the ":" character in a path, hence the override to
+   * strip the unsupported character from the path.
+   *
+   * @return FileContextTestHelper
+   */
+  @Override
+  protected FileContextTestHelper createFileContextHelper() {
+    return new FileContextTestHelper(new File(
+        RandomStringUtils.randomAlphanumeric(10))
+        .getAbsolutePath().replaceAll(":", ""));
+  }
+
+  @BeforeClass
+  public static void skipTestCheck() {
+    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextMainOperationsLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextMainOperationsLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextMainOperationsLive.java
new file mode 100644
index 0000000..b135550
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlWebHdfsFileContextMainOperationsLive.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DelegateToFileSystem;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextMainOperationsBaseTest;
+import org.apache.hadoop.fs.FileContextTestHelper;
+import org.apache.hadoop.fs.FileSystem;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+
+/**
+ * Verify that the ADLS file system adheres to the Hadoop file system contract
+ * using the tests available in FileContextMainOperationsBaseTest.
+ */
+public class TestAdlWebHdfsFileContextMainOperationsLive
+    extends FileContextMainOperationsBaseTest {
+
+  private static final String KEY_FILE_SYSTEM = "fs.defaultFS";
+
+  @BeforeClass
+  public static void skipTestCheck() {
+    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    Configuration conf = AdlStorageConfiguration.getConfiguration();
+    String fileSystem = conf.get(KEY_FILE_SYSTEM);
+    if (fileSystem == null || fileSystem.trim().length() == 0) {
+      throw new Exception("Default file system not configured.");
+    }
+    URI uri = new URI(fileSystem);
+    FileSystem fs = AdlStorageConfiguration.createAdlStorageConnector();
+    fc = FileContext.getFileContext(
+        new DelegateToFileSystem(uri, fs, conf, fs.getScheme(), false) {
+        }, conf);
+    super.setUp();
+  }
+
+  /**
+   * Required to override since getRandmizedTestDir on Windows generates an
+   * absolute local file path that contains the ":" character.
+   * An example of a generated path is "adl://<FileSystem Path>/d:/a/b/c".
+   *
+   * ADLS does not support the ":" character in a path, hence the override to
+   * strip the unsupported character from the path.
+   *
+   * @return FileContextTestHelper
+   */
+  @Override
+  protected FileContextTestHelper createFileContextHelper() {
+    return new FileContextTestHelper(
+        new File(RandomStringUtils.randomAlphanumeric(10)).getAbsolutePath()
+            .replaceAll(":", ""));
+  }
+
+  @Override
+  protected boolean listCorruptedBlocksSupported() {
+    return false;
+  }
+
+  @Override
+  public void testUnsupportedSymlink() throws IOException {
+    Assume.assumeTrue("Symbolic links are not supported by ADLS", false);
+  }
+
+  /**
+   * If this test fails with
+   * java.lang.RuntimeException: java.io.FileNotFoundException: Hadoop bin
+   * directory does not exist: <path>\hadoop-common-project
+   * \hadoop-common\target\bin, see
+   * https://wiki.apache.org/hadoop/WindowsProblems and build the Hadoop
+   * dependencies; otherwise mark this test as skipped.
+   */
+  @Override
+  public void testWorkingDirectory() throws Exception {
+    super.testWorkingDirectory();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/oauth2/TestCachedRefreshTokenBasedAccessTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/oauth2/TestCachedRefreshTokenBasedAccessTokenProvider.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/oauth2/TestCachedRefreshTokenBasedAccessTokenProvider.java
index e57d3a9..c044594 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/oauth2/TestCachedRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/oauth2/TestCachedRefreshTokenBasedAccessTokenProvider.java
@@ -20,17 +20,19 @@ package org.apache.hadoop.fs.adl.oauth2;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.oauth2.AccessTokenProvider;
-
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
 import static org.apache.hadoop.hdfs.web.oauth2.ConfRefreshTokenBasedAccessTokenProvider.OAUTH_REFRESH_TOKEN_KEY;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 /**
  * Verify cache behavior of ConfRefreshTokenBasedAccessTokenProvider instances.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/AdlMockWebServer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/AdlMockWebServer.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/AdlMockWebServer.java
new file mode 100644
index 0000000..69bb9ae
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/AdlMockWebServer.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.common;
+
+import com.eclipsesource.json.JsonObject;
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import com.squareup.okhttp.mockwebserver.MockWebServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.adl.TestableAdlFileSystem;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.web.oauth2.ConfCredentialBasedAccessTokenProvider;
+import org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider;
+import org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+/**
+ * Mock server to simulate ADLS backend calls. Derived tests can extend this
+ * infrastructure to override the expected server responses for the
+ * functionality under test. Common functionality, such as generating token
+ * information before a request is sent to the ADLS backend, is handled here.
+ */
+public class AdlMockWebServer {
+
+  // Create a MockWebServer. These are lean enough that you can create a new
+  // instance for every unit test.
+  private MockWebServer server = null;
+  private TestableAdlFileSystem fs = null;
+  private int port = 0;
+  private Configuration conf = new Configuration();
+
+  public MockWebServer getMockServer() {
+    return server;
+  }
+
+  public TestableAdlFileSystem getMockAdlFileSystem() {
+    return fs;
+  }
+
+  public int getPort() {
+    return port;
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public static MockResponse getTokenResponse() {
+    JsonObject jsonObject = new JsonObject()
+        .set(OAuth2Constants.EXPIRES_IN, "0987654321")
+        .set("token_type", "bearer").set(OAuth2Constants.ACCESS_TOKEN, "123");
+    MockResponse oauth2Response = new MockResponse();
+    oauth2Response.addHeader("Content-Type", "application/json");
+    oauth2Response.setResponseCode(200);
+    oauth2Response.setBody(jsonObject.toString());
+    return oauth2Response;
+  }
+
+  @Before
+  public void preTestSetup() throws IOException, URISyntaxException {
+
+    server = new MockWebServer();
+    server.enqueue(getTokenResponse());
+
+    // Start the server.
+    server.start();
+
+    // Ask the server for its URL. You'll need this to make HTTP requests.
+    URL baseUrl = server.getUrl("");
+    port = baseUrl.getPort();
+
+    // Exercise your application code, which should make those HTTP requests.
+    // Responses are returned in the same order that they are enqueued.
+    fs = new TestableAdlFileSystem();
+
+    conf.set(HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY, "MY_CLIENTID");
+    conf.set(HdfsClientConfigKeys.ACCESS_TOKEN_PROVIDER_KEY,
+        ConfCredentialBasedAccessTokenProvider.class.getName());
+    conf.set(HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_KEY, "true");
+    conf.set(HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY, "http://localhost:" +
+        port + "/refresh");
+    conf.set(CredentialBasedAccessTokenProvider.OAUTH_CREDENTIAL_KEY,
+        "credential");
+
+    URI uri = new URI("adl://localhost:" + port);
+    fs.initialize(uri, conf);
+  }
+
+  @After
+  public void postTestSetup() throws IOException {
+    fs.close();
+    server.shutdown();
+  }
+}
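
A minimal sketch of how a test derived from AdlMockWebServer might look,
assuming TestableAdlFileSystem exposes the standard FileSystem API and that
TestADLResponseData.getGetFileStatusJSONResponse(length) returns a
WebHDFS-style GETFILESTATUS body (the class name, path, and 1024-byte length
below are illustrative only):

  package org.apache.hadoop.fs.adl;

  import com.squareup.okhttp.mockwebserver.MockResponse;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.common.AdlMockWebServer;
  import org.junit.Assert;
  import org.junit.Test;

  import java.io.IOException;

  /** Hypothetical mock test; preTestSetup() in the base class wires the fs. */
  public class TestFileStatusOnMockServer extends AdlMockWebServer {
    @Test
    public void verifyFileLengthFromMockResponse() throws IOException {
      // Queue the response the ADL client should receive for GETFILESTATUS.
      getMockServer().enqueue(new MockResponse()
          .setResponseCode(200)
          .addHeader("Content-Type", "application/json")
          .setBody(TestADLResponseData.getGetFileStatusJSONResponse(1024)));

      // The request below goes to the local MockWebServer, never to Azure.
      FileStatus status = getMockAdlFileSystem()
          .getFileStatus(new Path("/test1/test2"));
      Assert.assertEquals(1024, status.getLen());
    }
  }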

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/ExpectedResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/ExpectedResponse.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/ExpectedResponse.java
new file mode 100644
index 0000000..727417e
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/ExpectedResponse.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.common;
+
+
+import com.squareup.okhttp.mockwebserver.MockResponse;
+
+import java.util.ArrayList;
+
+/**
+ * Supporting class that holds the expected MockResponse object along with the
+ * parameters used for validation in test methods.
+ */
+public class ExpectedResponse {
+  private MockResponse response;
+  private ArrayList<String> expectedQueryParameters = new ArrayList<String>();
+  private int expectedBodySize;
+  private String httpRequestType;
+
+  public int getExpectedBodySize() {
+    return expectedBodySize;
+  }
+
+  public String getHttpRequestType() {
+    return httpRequestType;
+  }
+
+  public ArrayList<String> getExpectedQueryParameters() {
+    return expectedQueryParameters;
+  }
+
+  public MockResponse getResponse() {
+    return response;
+  }
+
+  ExpectedResponse set(MockResponse mockResponse) {
+    this.response = mockResponse;
+    return this;
+  }
+
+  ExpectedResponse addExpectedQueryParam(String param) {
+    expectedQueryParameters.add(param);
+    return this;
+  }
+
+  ExpectedResponse addExpectedBodySize(int bodySize) {
+    this.expectedBodySize = bodySize;
+    return this;
+  }
+
+  ExpectedResponse addExpectedHttpRequestType(String expectedHttpRequestType) {
+    this.httpRequestType = expectedHttpRequestType;
+    return this;
+  }
+}
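
A small sketch of how a test might assemble an ExpectedResponse. Since the
setters are package-private, this only compiles from
org.apache.hadoop.fs.common; the JSON body and the validation parameters below
are placeholders, not a verified server payload:

  // Hypothetical usage; values chosen purely for illustration.
  MockResponse listStatusResponse = new MockResponse()
      .setResponseCode(200)
      .addHeader("Content-Type", "application/json")
      .setBody("{\"FileStatuses\":{\"FileStatus\":[]}}");

  ExpectedResponse expected = new ExpectedResponse()
      .set(listStatusResponse)
      .addExpectedHttpRequestType("GET")
      .addExpectedQueryParam("op=LISTSTATUS")
      .addExpectedBodySize(0);

  // A test can enqueue expected.getResponse() on the mock server and later
  // compare the recorded request against getHttpRequestType(),
  // getExpectedQueryParameters() and getExpectedBodySize().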

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e71382/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/TestDataForRead.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/TestDataForRead.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/TestDataForRead.java
new file mode 100644
index 0000000..150dc6c
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/common/TestDataForRead.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.common;
+
+import com.squareup.okhttp.mockwebserver.Dispatcher;
+import com.squareup.okhttp.mockwebserver.MockResponse;
+import com.squareup.okhttp.mockwebserver.RecordedRequest;
+import okio.Buffer;
+import org.apache.hadoop.fs.adl.TestADLResponseData;
+
+import java.util.ArrayList;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Supporting class for mock tests that validate ADLS read operations through
+ * the BufferManager and BatchByteArrayInputStream implementations.
+ */
+public class TestDataForRead {
+
+  private byte[] actualData;
+  private ArrayList<ExpectedResponse> responses;
+  private Dispatcher dispatcher;
+  private int intensityOfTest;
+  private boolean checkOfNoOfCalls;
+  private int expectedNoNetworkCall;
+
+  public TestDataForRead(final byte[] actualData, int expectedNoNetworkCall,
+      int intensityOfTest, boolean checkOfNoOfCalls) {
+    this.checkOfNoOfCalls = checkOfNoOfCalls;
+    this.actualData = actualData;
+    responses = new ArrayList<ExpectedResponse>();
+    this.expectedNoNetworkCall = expectedNoNetworkCall;
+    this.intensityOfTest = intensityOfTest;
+
+    dispatcher = new Dispatcher() {
+      @Override
+      public MockResponse dispatch(RecordedRequest recordedRequest)
+          throws InterruptedException {
+        if (recordedRequest.getPath().equals("/refresh")) {
+          return AdlMockWebServer.getTokenResponse();
+        }
+
+        if (recordedRequest.getRequestLine().contains("op=GETFILESTATUS")) {
+          return new MockResponse().setResponseCode(200).setBody(
+              TestADLResponseData
+                  .getGetFileStatusJSONResponse(actualData.length));
+        }
+
+        if (recordedRequest.getRequestLine().contains("op=OPEN")) {
+          String request = recordedRequest.getRequestLine();
+          int offset = 0;
+          int byteCount = 0;
+
+          Pattern pattern = Pattern.compile("offset=([0-9]+)");
+          Matcher matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            System.out.println(matcher.group(1));
+            offset = Integer.parseInt(matcher.group(1));
+          }
+
+          pattern = Pattern.compile("length=([0-9]+)");
+          matcher = pattern.matcher(request);
+          if (matcher.find()) {
+            System.out.println(matcher.group(1));
+            byteCount = Integer.parseInt(matcher.group(1));
+          }
+
+          Buffer buf = new Buffer();
+          buf.write(actualData, offset, byteCount);
+          return new MockResponse().setResponseCode(200)
+              .setChunkedBody(buf, 4 * 1024 * 1024);
+        }
+
+        return new MockResponse().setBody("NOT SUPPORTED").setResponseCode(501);
+      }
+    };
+  }
+
+  public boolean isCheckOfNoOfCalls() {
+    return checkOfNoOfCalls;
+  }
+
+  public int getExpectedNoNetworkCall() {
+    return expectedNoNetworkCall;
+  }
+
+  public int getIntensityOfTest() {
+    return intensityOfTest;
+  }
+
+  public byte[] getActualData() {
+    return actualData;
+  }
+
+  public ArrayList<ExpectedResponse> getResponses() {
+    return responses;
+  }
+
+  public Dispatcher getDispatcher() {
+    return dispatcher;
+  }
+}
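
A hedged sketch of how a mock read test might wire TestDataForRead into
AdlMockWebServer and read the data back through the ADL client. The data size,
expected call count, intensity and path are illustrative, and the fragment
assumes the usual imports for MockResponse, FSDataInputStream, Path, Random
and Assert:

  // Inside a test class extending AdlMockWebServer.
  byte[] sourceData = new byte[16 * 1024 * 1024];
  new Random().nextBytes(sourceData);

  // The dispatcher answers GETFILESTATUS and OPEN requests from sourceData.
  TestDataForRead testData = new TestDataForRead(sourceData, 5, 1000, false);
  getMockServer().setDispatcher(testData.getDispatcher());

  FSDataInputStream in = getMockAdlFileSystem().open(new Path("/test1"));
  byte[] readBack = new byte[sourceData.length];
  in.readFully(0, readBack);
  in.close();
  Assert.assertArrayEquals(sourceData, readBack);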


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org