You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by st...@apache.org on 2016/12/08 22:29:10 UTC
[03/50] [abbrv] hadoop git commit: HADOOP-13257. Improve Azure Data
Lake contract tests. Contributed by Vishwajeet Dusane
HADOOP-13257. Improve Azure Data Lake contract tests. Contributed by Vishwajeet Dusane
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4113ec5f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4113ec5f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4113ec5f
Branch: refs/heads/HADOOP-13345
Commit: 4113ec5fa5ca049ebaba039b1faf3911c6a34f7b
Parents: 51211a7
Author: Mingliang Liu <li...@apache.org>
Authored: Fri Dec 2 15:54:57 2016 -0800
Committer: Mingliang Liu <li...@apache.org>
Committed: Fri Dec 2 15:54:57 2016 -0800
----------------------------------------------------------------------
.../org/apache/hadoop/fs/adl/AdlFileSystem.java | 24 +-
.../org/apache/hadoop/fs/adl/TestAdlRead.java | 6 +-
.../apache/hadoop/fs/adl/TestListStatus.java | 6 +-
.../fs/adl/live/TestAdlContractAppendLive.java | 11 +-
.../fs/adl/live/TestAdlContractConcatLive.java | 23 +-
.../fs/adl/live/TestAdlContractCreateLive.java | 19 +-
.../fs/adl/live/TestAdlContractDeleteLive.java | 11 +-
.../live/TestAdlContractGetFileStatusLive.java | 36 ++
.../fs/adl/live/TestAdlContractMkdirLive.java | 25 +-
.../fs/adl/live/TestAdlContractOpenLive.java | 11 +-
.../fs/adl/live/TestAdlContractRenameLive.java | 30 +-
.../fs/adl/live/TestAdlContractRootDirLive.java | 19 +-
.../fs/adl/live/TestAdlContractSeekLive.java | 11 +-
.../live/TestAdlDifferentSizeWritesLive.java | 69 ++--
.../live/TestAdlFileContextCreateMkdirLive.java | 67 ++++
.../TestAdlFileContextMainOperationsLive.java | 99 ++++++
.../adl/live/TestAdlFileSystemContractLive.java | 57 +---
.../live/TestAdlInternalCreateNonRecursive.java | 134 ++++++++
.../fs/adl/live/TestAdlPermissionLive.java | 116 +++++++
.../adl/live/TestAdlSupportedCharsetInPath.java | 334 +++++++++++++++++++
.../apache/hadoop/fs/adl/live/TestMetadata.java | 111 ++++++
21 files changed, 995 insertions(+), 224 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/fs/adl/AdlFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/fs/adl/AdlFileSystem.java b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/fs/adl/AdlFileSystem.java
index 9083afc..bd43c52 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/fs/adl/AdlFileSystem.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/fs/adl/AdlFileSystem.java
@@ -346,7 +346,6 @@ public class AdlFileSystem extends FileSystem {
* @see #setPermission(Path, FsPermission)
* @deprecated API only for 0.20-append
*/
- @Deprecated
@Override
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
EnumSet<CreateFlag> flags, int bufferSize, short replication,
@@ -471,6 +470,10 @@ public class AdlFileSystem extends FileSystem {
@Override
public boolean rename(final Path src, final Path dst) throws IOException {
statistics.incrementWriteOps(1);
+ if (toRelativeFilePath(src).equals("/")) {
+ return false;
+ }
+
return adlClient.rename(toRelativeFilePath(src), toRelativeFilePath(dst));
}
@@ -522,9 +525,24 @@ public class AdlFileSystem extends FileSystem {
public boolean delete(final Path path, final boolean recursive)
throws IOException {
statistics.incrementWriteOps(1);
+ String relativePath = toRelativeFilePath(path);
+ // Delete on root directory not supported.
+ if (relativePath.equals("/")) {
+ // This is important check after recent commit
+ // HADOOP-12977 and HADOOP-13716 validates on root for
+ // 1. if root is empty and non recursive delete then return false.
+ // 2. if root is non empty and non recursive delete then throw exception.
+ if (!recursive
+ && adlClient.enumerateDirectory(toRelativeFilePath(path), 1).size()
+ > 0) {
+ throw new IOException("Delete on root is not supported.");
+ }
+ return false;
+ }
+
return recursive ?
- adlClient.deleteRecursive(toRelativeFilePath(path)) :
- adlClient.delete(toRelativeFilePath(path));
+ adlClient.deleteRecursive(relativePath) :
+ adlClient.delete(relativePath);
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
index 734256a..172663c 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestAdlRead.java
@@ -102,7 +102,7 @@ public class TestAdlRead extends AdlMockWebServer {
n += count;
}
- Assert.assertEquals(testData.getActualData().length, expectedData.length);
+ Assert.assertEquals(expectedData.length, testData.getActualData().length);
Assert.assertArrayEquals(expectedData, testData.getActualData());
in.close();
if (testData.isCheckOfNoOfCalls()) {
@@ -119,8 +119,8 @@ public class TestAdlRead extends AdlMockWebServer {
for (int i = 0; i < 1000; ++i) {
int position = random.nextInt(testData.getActualData().length);
in.seek(position);
- Assert.assertEquals(in.getPos(), position);
- Assert.assertEquals(in.read(), testData.getActualData()[position] & 0xFF);
+ Assert.assertEquals(position, in.getPos());
+ Assert.assertEquals(testData.getActualData()[position] & 0xFF, in.read());
}
in.close();
if (testData.isCheckOfNoOfCalls()) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
index dd27a10..c151e89 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/TestListStatus.java
@@ -50,7 +50,7 @@ public class TestListStatus extends AdlMockWebServer {
.listStatus(new Path("/test1/test2"));
long endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
- Assert.assertEquals(ls.length, 10);
+ Assert.assertEquals(10, ls.length);
getMockServer().enqueue(new MockResponse().setResponseCode(200)
.setBody(TestADLResponseData.getListFileStatusJSONResponse(200)));
@@ -58,7 +58,7 @@ public class TestListStatus extends AdlMockWebServer {
ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
- Assert.assertEquals(ls.length, 200);
+ Assert.assertEquals(200, ls.length);
getMockServer().enqueue(new MockResponse().setResponseCode(200)
.setBody(TestADLResponseData.getListFileStatusJSONResponse(2048)));
@@ -66,7 +66,7 @@ public class TestListStatus extends AdlMockWebServer {
ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
- Assert.assertEquals(ls.length, 2048);
+ Assert.assertEquals(2048, ls.length);
}
@Test
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
index 83390af..ffe6dd3 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractAppendLive.java
@@ -23,11 +23,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractAppendTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
import org.junit.Test;
/**
- * Verify Adls APPEND semantics compliance with Hadoop.
+ * Test Append on Adl file system.
*/
public class TestAdlContractAppendLive extends AbstractContractAppendTest {
@@ -42,12 +41,4 @@ public class TestAdlContractAppendLive extends AbstractContractAppendTest {
ContractTestUtils.unsupported("Skipping since renaming file in append "
+ "mode not supported in Adl");
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
index 8474e9c..60d30ac 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractConcatLive.java
@@ -20,14 +20,15 @@
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.AbstractContractConcatTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
import org.junit.Test;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
+
/**
- * Verify Adls CONCAT semantics compliance with Hadoop.
+ * Test concat on Adl file system.
*/
public class TestAdlContractConcatLive extends AbstractContractConcatTest {
@@ -36,17 +37,13 @@ public class TestAdlContractConcatLive extends AbstractContractConcatTest {
return new AdlStorageContract(configuration);
}
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
-
@Test
public void testConcatMissingTarget() throws Throwable {
- ContractTestUtils.unsupported("BUG : Adl to support expectation from "
- + "concat on missing targets.");
+ Path testPath = path("test");
+ Path zeroByteFile = new Path(testPath, "zero.txt");
+ Path target = new Path(testPath, "target");
+ touch(getFileSystem(), zeroByteFile);
+ // Concat on missing target is allowed on Adls file system.
+ getFileSystem().concat(target, new Path[] {zeroByteFile});
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
index 907c50c..06347e9 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractCreateLive.java
@@ -22,12 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
-import org.junit.Test;
/**
- * Verify Adls CREATE semantics compliance with Hadoop.
+ * Test creating files, overwrite options.
*/
public class TestAdlContractCreateLive extends AbstractContractCreateTest {
@@ -35,18 +32,4 @@ public class TestAdlContractCreateLive extends AbstractContractCreateTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
-
- @Test
- public void testOverwriteEmptyDirectory() throws Throwable {
- ContractTestUtils
- .unsupported("BUG : Adl to support override empty " + "directory.");
- }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
index 30eaec7..6961f15 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractDeleteLive.java
@@ -22,10 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.junit.Before;
/**
- * Verify Adls DELETE semantics compliance with Hadoop.
+ * Test delete contract test.
*/
public class TestAdlContractDeleteLive extends AbstractContractDeleteTest {
@@ -33,12 +32,4 @@ public class TestAdlContractDeleteLive extends AbstractContractDeleteTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractGetFileStatusLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractGetFileStatusLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractGetFileStatusLive.java
new file mode 100644
index 0000000..d50dd68
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractGetFileStatusLive.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractGetFileStatusTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+/**
+ * Test getFileStatus contract test.
+ */
+public class TestAdlContractGetFileStatusLive extends
+ AbstractContractGetFileStatusTest {
+
+ @Override
+ protected AbstractFSContract createContract(Configuration configuration) {
+ return new AdlStorageContract(configuration);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
index e498110..5e760c5 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractMkdirLive.java
@@ -22,34 +22,13 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
-import org.junit.Test;
/**
- * Verify Adls MKDIR semantics compliance with Hadoop.
+ * Test Mkdir contract on Adl storage file system.
*/
public class TestAdlContractMkdirLive extends AbstractContractMkdirTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new AdlStorageContract(conf);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
-
- @Test
- public void testMkdirOverParentFile() throws Throwable {
- ContractTestUtils.unsupported("Not supported by Adl");
- }
-
- @Test
- public void testNoMkdirOverFile() throws Throwable {
- ContractTestUtils.unsupported("Not supported by Adl");
- }
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
index 2bb2095..7a35d2c 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractOpenLive.java
@@ -22,10 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.junit.Before;
/**
- * Verify Adls OPEN/READ semantics compliance with Hadoop.
+ * Test OPEN - read API.
*/
public class TestAdlContractOpenLive extends AbstractContractOpenTest {
@@ -33,12 +32,4 @@ public class TestAdlContractOpenLive extends AbstractContractOpenTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
index 06063c5..d72d35e 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java
@@ -22,12 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
-import org.junit.Test;
/**
- * Verify Adls RENAME semantics compliance with Hadoop.
+ * Test rename contract test cases on Adl file system.
*/
public class TestAdlContractRenameLive extends AbstractContractRenameTest {
@@ -35,29 +32,4 @@ public class TestAdlContractRenameLive extends AbstractContractRenameTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
-
- @Test
- public void testRenameFileOverExistingFile() throws Throwable {
- ContractTestUtils
- .unsupported("BUG : Adl to support full complete POSIX" + "behaviour");
- }
-
- @Test
- public void testRenameFileNonexistentDir() throws Throwable {
- ContractTestUtils
- .unsupported("BUG : Adl to support create dir is not " + "exist");
- }
-
- @Test
- public void testRenameWithNonEmptySubDir() throws Throwable {
- ContractTestUtils.unsupported("BUG : Adl to support non empty dir move.");
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
index bf4e549..8ebc632 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRootDirLive.java
@@ -22,12 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Before;
-import org.junit.Test;
/**
- * Verify Adls root level operation support.
+ * Test operation on root level.
*/
public class TestAdlContractRootDirLive
extends AbstractContractRootDirectoryTest {
@@ -35,18 +32,4 @@ public class TestAdlContractRootDirLive
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
-
- @Test
- public void testRmNonEmptyRootDirNonRecursive() throws Throwable {
- ContractTestUtils.unsupported(
- "BUG : Adl should throw exception instred " + "of returning false.");
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
index 0976464..62423b6 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractSeekLive.java
@@ -22,10 +22,9 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.junit.Before;
/**
- * Verify Adls OPEN/READ seek operation support.
+ * Test seek operation on Adl file system.
*/
public class TestAdlContractSeekLive extends AbstractContractSeekTest {
@@ -33,12 +32,4 @@ public class TestAdlContractSeekLive extends AbstractContractSeekTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
-
- @Before
- @Override
- public void setup() throws Exception {
- org.junit.Assume
- .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
- super.setup();
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
index 8f53400..5421e0b 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlDifferentSizeWritesLive.java
@@ -23,27 +23,63 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.Parallelized;
import org.junit.Assert;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import java.io.IOException;
import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Random;
+import java.util.UUID;
+
+import static org.apache.hadoop.fs.adl.AdlConfKeys.WRITE_BUFFER_SIZE_KEY;
/**
- * Verify different data segment size writes ensure the integrity and
- * order of the data.
+ * Verify data integrity with different data sizes with buffer size.
*/
+@RunWith(Parallelized.class)
public class TestAdlDifferentSizeWritesLive {
+ private static Random rand = new Random();
+ private int totalSize;
+ private int chunkSize;
+
+ public TestAdlDifferentSizeWritesLive(int totalSize, int chunkSize) {
+ this.totalSize = totalSize;
+ this.chunkSize = chunkSize;
+ }
public static byte[] getRandomByteArrayData(int size) {
byte[] b = new byte[size];
- Random rand = new Random();
rand.nextBytes(b);
return b;
}
+ @Parameterized.Parameters(name = "{index}: Data Size [{0}] ; Chunk Size "
+ + "[{1}]")
+ public static Collection testDataForIntegrityTest() {
+ return Arrays.asList(
+ new Object[][] {{4 * 1024, 1 * 1024}, {4 * 1024, 7 * 1024},
+ {4 * 1024, 10}, {2 * 1024, 10}, {1 * 1024, 10}, {100, 1},
+ {4 * 1024, 1 * 1024}, {7 * 1024, 2 * 1024}, {9 * 1024, 2 * 1024},
+ {10 * 1024, 3 * 1024}, {10 * 1024, 1 * 1024},
+ {10 * 1024, 8 * 1024}});
+ }
+
+ @BeforeClass
+ public static void cleanUpParent() throws IOException, URISyntaxException {
+ if (AdlStorageConfiguration.isContractTestEnabled()) {
+ Path path = new Path("/test/dataIntegrityCheck/");
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+ fs.delete(path, true);
+ }
+ }
+
@Before
public void setup() throws Exception {
org.junit.Assume
@@ -51,32 +87,17 @@ public class TestAdlDifferentSizeWritesLive {
}
@Test
- public void testSmallDataWrites() throws IOException {
- testDataIntegrity(4 * 1024 * 1024, 1 * 1024);
- testDataIntegrity(4 * 1024 * 1024, 7 * 1024);
- testDataIntegrity(4 * 1024 * 1024, 10);
- testDataIntegrity(2 * 1024 * 1024, 10);
- testDataIntegrity(1 * 1024 * 1024, 10);
- testDataIntegrity(100, 1);
- }
-
- @Test
- public void testMediumDataWrites() throws IOException {
- testDataIntegrity(4 * 1024 * 1024, 1 * 1024 * 1024);
- testDataIntegrity(7 * 1024 * 1024, 2 * 1024 * 1024);
- testDataIntegrity(9 * 1024 * 1024, 2 * 1024 * 1024);
- testDataIntegrity(10 * 1024 * 1024, 3 * 1024 * 1024);
- }
-
- private void testDataIntegrity(int totalSize, int chunkSize)
- throws IOException {
- Path path = new Path("/test/dataIntegrityCheck");
+ public void testDataIntegrity() throws IOException {
+ Path path = new Path(
+ "/test/dataIntegrityCheck/" + UUID.randomUUID().toString());
FileSystem fs = null;
+ AdlStorageConfiguration.getConfiguration()
+ .setInt(WRITE_BUFFER_SIZE_KEY, 4 * 1024);
try {
fs = AdlStorageConfiguration.createStorageConnector();
} catch (URISyntaxException e) {
throw new IllegalStateException("Can not initialize ADL FileSystem. "
- + "Please check fs.defaultFS property.", e);
+ + "Please check test.fs.adl.name property.", e);
}
byte[] expectedData = getRandomByteArrayData(totalSize);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextCreateMkdirLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextCreateMkdirLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextCreateMkdirLive.java
new file mode 100644
index 0000000..5166de1
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextCreateMkdirLive.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DelegateToFileSystem;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextCreateMkdirBaseTest;
+import org.apache.hadoop.fs.FileContextTestHelper;
+import org.apache.hadoop.fs.FileSystem;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+
+import java.net.URI;
+import java.util.UUID;
+
+/**
+ * Test file context Create/Mkdir operation.
+ */
+public class TestAdlFileContextCreateMkdirLive
+ extends FileContextCreateMkdirBaseTest {
+ private static final String KEY_FILE_SYSTEM = "test.fs.adl.name";
+
+ @BeforeClass
+ public static void skipTestCheck() {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ Configuration conf = AdlStorageConfiguration.getConfiguration();
+ String fileSystem = conf.get(KEY_FILE_SYSTEM);
+ if (fileSystem == null || fileSystem.trim().length() == 0) {
+ throw new Exception("Default file system not configured.");
+ }
+ URI uri = new URI(fileSystem);
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+ fc = FileContext.getFileContext(
+ new DelegateToFileSystem(uri, fs, conf, fs.getScheme(), false) {
+ }, conf);
+ super.setUp();
+ }
+
+ @Override
+ protected FileContextTestHelper createFileContextHelper() {
+ // On Windows, root directory path is created from local running directory.
+ // Adl does not support ':' as part of the path which results in failure.
+ return new FileContextTestHelper(UUID.randomUUID().toString());
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextMainOperationsLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextMainOperationsLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextMainOperationsLive.java
new file mode 100644
index 0000000..ee10da7
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileContextMainOperationsLive.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.UUID;
+
+import static org.apache.hadoop.util.Shell.WINDOWS;
+
+/**
+ * Run collection of tests for the {@link FileContext}.
+ */
+public class TestAdlFileContextMainOperationsLive
+ extends FileContextMainOperationsBaseTest {
+
+ private static final String KEY_FILE_SYSTEM = "test.fs.adl.name";
+
+ @BeforeClass
+ public static void skipTestCheck() {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ Configuration conf = AdlStorageConfiguration.getConfiguration();
+ String fileSystem = conf.get(KEY_FILE_SYSTEM);
+ if (fileSystem == null || fileSystem.trim().length() == 0) {
+ throw new Exception("Default file system not configured.");
+ }
+ URI uri = new URI(fileSystem);
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+ fc = FileContext.getFileContext(
+ new DelegateToFileSystem(uri, fs, conf, fs.getScheme(), false) {
+ }, conf);
+ super.setUp();
+ }
+
+ @Override
+ protected FileContextTestHelper createFileContextHelper() {
+ // On Windows, root directory path is created from local running directory.
+ // Adl does not support ':' as part of the path which results in failure.
+ // return new FileContextTestHelper(GenericTestUtils
+ // .getRandomizedTestDir()
+ // .getAbsolutePath().replaceAll(":",""));
+ return new FileContextTestHelper(UUID.randomUUID().toString());
+ }
+
+ @Override
+ protected boolean listCorruptedBlocksSupported() {
+ return false;
+ }
+
+ @Override
+ public void testWorkingDirectory() throws Exception {
+ if (WINDOWS) {
+ // TODO :Fix is required in Hadoop shell to support windows permission
+ // set.
+ // The test is failing with NPE on windows platform only, with Linux
+ // platform test passes.
+ Assume.assumeTrue(false);
+ } else {
+ super.testWorkingDirectory();
+ }
+ }
+
+ @Override
+ public void testUnsupportedSymlink() throws IOException {
+ Assume.assumeTrue(false);
+ }
+
+ @Test
+ public void testSetVerifyChecksum() throws IOException {
+ Assume.assumeTrue(false);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
index 0df7d05..657947e 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java
@@ -22,12 +22,13 @@ package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.Path;
-import org.junit.Test;
+import org.junit.Assume;
+import org.junit.Before;
import java.io.IOException;
/**
- * Verify Adls adhere to Hadoop file system semantics.
+ * Test Base contract tests on Adl file system.
*/
public class TestAdlFileSystemContractLive extends FileSystemContractBaseTest {
private FileSystem adlStore;
@@ -60,52 +61,8 @@ public class TestAdlFileSystemContractLive extends FileSystemContractBaseTest {
}
}
- public void testGetFileStatus() throws IOException {
- if (!AdlStorageConfiguration.isContractTestEnabled()) {
- return;
- }
-
- Path testPath = new Path("/test/adltest");
- if (adlStore.exists(testPath)) {
- adlStore.delete(testPath, false);
- }
-
- adlStore.create(testPath).close();
- assertTrue(adlStore.delete(testPath, false));
- }
-
- /**
- * The following tests are failing on Azure Data Lake and the Azure Data Lake
- * file system code needs to be modified to make them pass.
- * A separate work item has been opened for this.
- */
- @Test
- @Override
- public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
- // BUG : Adl should return exception instead of false.
- }
-
- @Test
- @Override
- public void testMkdirsWithUmask() throws Exception {
- // Support under implementation in Adl
- }
-
- @Test
- @Override
- public void testMoveFileUnderParent() throws Exception {
- // BUG: Adl server should return expected status code.
- }
-
- @Test
- @Override
- public void testRenameFileToSelf() throws Exception {
- // BUG: Adl server should return expected status code.
- }
-
- @Test
- @Override
- public void testRenameToDirWithSamePrefixAllowed() throws Exception {
- // BUG: Adl server should return expected status code.
+ @Before
+ public void skipTestCheck() {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlInternalCreateNonRecursive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlInternalCreateNonRecursive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlInternalCreateNonRecursive.java
new file mode 100644
index 0000000..7e11a54
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlInternalCreateNonRecursive.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.Parallelized;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.UUID;
+
+/**
+ * Test createNonRecursive API.
+ */
+@RunWith(Parallelized.class)
+public class TestAdlInternalCreateNonRecursive {
+ private Path inputFileName;
+ private FsPermission inputPermission;
+ private boolean inputOverride;
+ private boolean inputFileAlreadyExist;
+ private boolean inputParentAlreadyExist;
+ private Class<IOException> expectedExceptionType;
+ private FileSystem adlStore;
+
+ public TestAdlInternalCreateNonRecursive(String testScenario, String fileName,
+ FsPermission permission, boolean override, boolean fileAlreadyExist,
+ boolean parentAlreadyExist, Class<IOException> exceptionType) {
+
+ // Random parent path for each test so that parallel execution does not fail
+ // other running test.
+ inputFileName = new Path(
+ "/test/createNonRecursive/" + UUID.randomUUID().toString(), fileName);
+ inputPermission = permission;
+ inputFileAlreadyExist = fileAlreadyExist;
+ inputOverride = override;
+ inputParentAlreadyExist = parentAlreadyExist;
+ expectedExceptionType = exceptionType;
+ }
+
+ @Parameterized.Parameters(name = "{0}")
+ public static Collection adlCreateNonRecursiveTestData()
+ throws UnsupportedEncodingException {
+ /*
+ Test Data
+ File name, Permission, Override flag, File already exist, Parent
+ already exist
+ shouldCreateSucceed, expectedExceptionIfFileCreateFails
+
+ File already exist and Parent already exist are mutually exclusive.
+ */
+ return Arrays.asList(new Object[][] {
+ {"CNR - When file do not exist.", UUID.randomUUID().toString(),
+ FsPermission.getFileDefault(), false, false, true, null},
+ {"CNR - When file exist. Override false", UUID.randomUUID().toString(),
+ FsPermission.getFileDefault(), false, true, true,
+ FileAlreadyExistsException.class},
+ {"CNR - When file exist. Override true", UUID.randomUUID().toString(),
+ FsPermission.getFileDefault(), true, true, true, null},
+
+ //TODO: This test is skipped till the fixes are not made it to prod.
+ /*{ "CNR - When parent do no exist.", UUID.randomUUID().toString(),
+ FsPermission.getFileDefault(), false, false, true, false,
+ IOException.class }*/});
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ adlStore = AdlStorageConfiguration.createStorageConnector();
+ }
+
+ @Test
+ public void testCreateNonRecursiveFunctionality() throws IOException {
+ if (inputFileAlreadyExist) {
+ FileSystem.create(adlStore, inputFileName, inputPermission);
+ }
+
+ // Mutually exclusive to inputFileAlreadyExist
+ if (inputParentAlreadyExist) {
+ adlStore.mkdirs(inputFileName.getParent());
+ } else {
+ adlStore.delete(inputFileName.getParent(), true);
+ }
+
+ try {
+ adlStore.createNonRecursive(inputFileName, inputPermission, inputOverride,
+ CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT,
+ adlStore.getDefaultReplication(inputFileName),
+ adlStore.getDefaultBlockSize(inputFileName), null);
+ } catch (IOException e) {
+
+ if (expectedExceptionType == null) {
+ throw e;
+ }
+
+ Assert.assertEquals(expectedExceptionType, e.getClass());
+ return;
+ }
+
+ if (expectedExceptionType != null) {
+ Assert.fail("CreateNonRecursive should have failed with exception "
+ + expectedExceptionType.getName());
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlPermissionLive.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlPermissionLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlPermissionLive.java
new file mode 100644
index 0000000..dd7c10d
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlPermissionLive.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.Parallelized;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.junit.*;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.UUID;
+
+/**
+ * Test ACL permission on file/folder on Adl file system.
+ */
+@RunWith(Parallelized.class)
+public class TestAdlPermissionLive {
+
+ private static Path testRoot = new Path("/test");
+ private FsPermission permission;
+ private Path path;
+ private FileSystem adlStore;
+
+ public TestAdlPermissionLive(FsPermission testPermission) {
+ permission = testPermission;
+ }
+
+ @Parameterized.Parameters(name = "{0}")
+ public static Collection adlCreateNonRecursiveTestData()
+ throws UnsupportedEncodingException {
+ /*
+ Test Data
+ File/Folder name, User permission, Group permission, Other Permission,
+ Parent already exist
+ shouldCreateSucceed, expectedExceptionIfFileCreateFails
+ */
+ final Collection<Object[]> datas = new ArrayList<>();
+ for (FsAction g : FsAction.values()) {
+ for (FsAction o : FsAction.values()) {
+ datas.add(new Object[] {new FsPermission(FsAction.ALL, g, o)});
+ }
+ }
+ return datas;
+ }
+
+ @AfterClass
+ public static void cleanUp() throws IOException, URISyntaxException {
+ if (AdlStorageConfiguration.isContractTestEnabled()) {
+ Assert.assertTrue(AdlStorageConfiguration.createStorageConnector()
+ .delete(testRoot, true));
+ }
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ adlStore = AdlStorageConfiguration.createStorageConnector();
+ }
+
+ @Test
+ public void testFilePermission() throws IOException {
+ path = new Path(testRoot, UUID.randomUUID().toString());
+ adlStore.getConf()
+ .set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "000");
+
+ adlStore.mkdirs(path.getParent(),
+ new FsPermission(FsAction.ALL, FsAction.WRITE, FsAction.NONE));
+ adlStore.removeDefaultAcl(path.getParent());
+
+ adlStore.create(path, permission, true, 1024, (short) 1, 1023, null);
+ FileStatus status = adlStore.getFileStatus(path);
+ Assert.assertEquals(permission, status.getPermission());
+ }
+
+ @Test
+ public void testFolderPermission() throws IOException {
+ path = new Path(testRoot, UUID.randomUUID().toString());
+ adlStore.getConf()
+ .set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "000");
+ adlStore.mkdirs(path.getParent(),
+ new FsPermission(FsAction.ALL, FsAction.WRITE, FsAction.NONE));
+ adlStore.removeDefaultAcl(path.getParent());
+
+ adlStore.mkdirs(path, permission);
+ FileStatus status = adlStore.getFileStatus(path);
+ Assert.assertEquals(permission, status.getPermission());
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlSupportedCharsetInPath.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlSupportedCharsetInPath.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlSupportedCharsetInPath.java
new file mode 100644
index 0000000..d80b6bf
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlSupportedCharsetInPath.java
@@ -0,0 +1,334 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.common.Parallelized;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URISyntaxException;
+import java.util.*;
+
+/**
+ * Test supported ASCII, UTF-8 character set supported by Adl storage file
+ * system on file/folder operation.
+ */
+@RunWith(Parallelized.class)
+public class TestAdlSupportedCharsetInPath {
+
+ private static final String TEST_ROOT = "/test/";
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TestAdlSupportedCharsetInPath.class);
+ private String path;
+
+ public TestAdlSupportedCharsetInPath(String filePath) {
+ path = filePath;
+ }
+
+ @Parameterized.Parameters(name = "{0}")
+ public static Collection<Object[]> adlCharTestData()
+ throws UnsupportedEncodingException {
+
+ ArrayList<String> filePathList = new ArrayList<>();
+ for (int i = 32; i < 127; ++i) {
+ String specialChar = (char) i + "";
+ if (i >= 48 && i <= 57) {
+ continue;
+ }
+
+ if (i >= 65 && i <= 90) {
+ continue;
+ }
+
+ if (i >= 97 && i <= 122) {
+ continue;
+ }
+
+ // Special char at start of the path
+ if (i != 92 && i != 58 && i != 46 && i != 47) {
+ filePathList.add(specialChar + "");
+ }
+
+ // Special char at end of string
+ if (i != 92 && i != 47 && i != 58) {
+ filePathList.add("file " + i + " " + specialChar);
+ }
+
+ // Special char in between string
+ if (i != 47 && i != 58 && i != 92) {
+ filePathList.add("file " + i + " " + specialChar + "_name");
+ }
+ }
+
+ filePathList.add("a ");
+ filePathList.add("a..b");
+ fillUnicodes(filePathList);
+ Collection<Object[]> result = new ArrayList<>();
+ for (String item : filePathList) {
+ result.add(new Object[] {item});
+ }
+ return result;
+ }
+
+ private static void fillUnicodes(ArrayList<String> filePathList) {
+ // Unicode characters
+ filePathList.add("\u0627\u0644\u0628\u064a\u0627\u0646\u0627\u062a \u0627\u0644\u0643\u0628\u064a\u0631\u0629"); // Arabic
+ filePathList.add("Të dhënat i madh"); // Albanian
+ filePathList.add("\u0574\u0565\u056e \u057f\u057e\u0575\u0561\u056c\u0576\u0565\u0580\u0568"); // Armenian
+ filePathList.add("böyük data"); // Azerbaijani
+ filePathList.add("\u0432\u044f\u043b\u0456\u043a\u0456\u044f \u0434\u0430\u0434\u0437\u0435\u043d\u044b\u044f"); // Belarusian,
+ filePathList.add("\u09ac\u09bf\u0997 \u09a1\u09c7\u099f\u09be"); // Bengali
+ filePathList.add("veliki podataka"); // Bosnian
+ filePathList.add("\u0433\u043e\u043b\u044f\u043c\u0430 \u0434\u0430\u043d\u043d\u0438"); // Bulgarian
+ filePathList.add("\u5927\u6570\u636e"); // Chinese - Simplified
+ filePathList.add("\u5927\u6578\u64da"); // Chinese - Traditional
+ filePathList.add("\u10d3\u10d8\u10d3\u10d8 \u10db\u10dd\u10dc\u10d0\u10ea\u10d4\u10db\u10d7\u10d0"); // Georgian,
+ filePathList.add("große Daten"); // German
+ filePathList.add("\u03bc\u03b5\u03b3\u03ac\u03bb\u03bf \u03b4\u03b5\u03b4\u03bf\u03bc\u03ad\u03bd\u03b1"); // Greek
+ filePathList.add("\u0aae\u0acb\u0a9f\u0abe \u0aae\u0abe\u0ab9\u0abf\u0aa4\u0ac0"); // Gujarati
+ filePathList.add("\u05e0\u05ea\u05d5\u05e0\u05d9\u05dd \u05d2\u05d3\u05d5\u05dc\u05d9\u05dd"); // Hebrew
+ filePathList.add("\u092c\u0921\u093c\u093e \u0921\u0947\u091f\u093e"); // Hindi
+ filePathList.add("stór gögn"); // Icelandic
+ filePathList.add("sonraí mór"); // Irish
+ filePathList.add("\u30d3\u30c3\u30b0\u30c7\u30fc\u30bf"); // Japanese
+ filePathList.add("\u04af\u043b\u043a\u0435\u043d \u0434\u0435\u0440\u0435\u043a\u0442\u0435\u0440"); // Kazakh
+ filePathList.add("\u1791\u17b7\u1793\u17d2\u1793\u1793\u17d0\u1799\u1792\u17c6"); // Khmer
+ filePathList.add("\ube45 \ub370\uc774\ud130"); // Korean
+ filePathList.add("\u0e82\u0ecd\u0ec9\u0ea1\u0eb9\u0e99 \u0e82\u0eb0\u0eab\u0e99\u0eb2\u0e94\u0ec3\u0eab\u0e8d\u0ec8"); // Lao
+ filePathList.add("\u0433\u043e\u043b\u0435\u043c\u0438 \u043f\u043e\u0434\u0430\u0442\u043e\u0446\u0438"); // Macedonian
+ filePathList.add("\u0920\u0942\u0932\u094b \u0921\u093e\u091f\u093e"); // Nepali
+ filePathList.add("\u0d35\u0d32\u0d3f\u0d2f \u0d21\u0d3e\u0d31\u0d4d\u0d31"); // Malayalam
+ filePathList.add("\u092e\u094b\u0920\u0947 \u0921\u0947\u091f\u093e"); // Marathi
+ filePathList.add("\u0442\u043e\u043c \u043c\u044d\u0434\u044d\u044d\u043b\u044d\u043b"); // Mongolian
+ filePathList.add("\u0627\u0637\u0644\u0627\u0639\u0627\u062a \u0628\u0632\u0631\u06af"); // Persian
+ filePathList.add("\u0a35\u0a71\u0a21\u0a47 \u0a21\u0a3e\u0a1f\u0a47 \u0a28\u0a42\u0a70"); // Punjabi
+ filePathList.add("\u0431\u043e\u043b\u044c\u0448\u0438\u0435 \u0434\u0430\u043d\u043d\u044b\u0435"); // Russian
+ filePathList.add("\u0412\u0435\u043b\u0438\u043a\u0438 \u043f\u043e\u0434\u0430\u0442\u0430\u043a\u0430"); // Serbian
+ filePathList.add("\u0dc0\u0dd2\u0dc1\u0dcf\u0dbd \u0daf\u0dad\u0dca\u0dad"); // Sinhala
+ filePathList.add("big dát"); // Slovak
+ filePathList.add("\u043c\u0430\u044a\u043b\u0443\u043c\u043e\u0442\u0438 \u043a\u0430\u043b\u043e\u043d"); // Tajik
+ filePathList.add("\u0baa\u0bc6\u0bb0\u0bbf\u0baf \u0ba4\u0bb0\u0bb5\u0bc1"); // Tamil
+ filePathList.add("\u0c2a\u0c46\u0c26\u0c4d\u0c26 \u0c21\u0c47\u0c1f\u0c3e"); // Telugu
+ filePathList.add("\u0e02\u0e49\u0e2d\u0e21\u0e39\u0e25\u0e43\u0e2b\u0e0d\u0e48"); // Thai
+ filePathList.add("büyük veri"); // Turkish
+ filePathList.add("\u0432\u0435\u043b\u0438\u043a\u0456 \u0434\u0430\u043d\u0456"); // Ukrainian
+ filePathList.add("\u0628\u0691\u06d2 \u0627\u0639\u062f\u0627\u062f \u0648 \u0634\u0645\u0627\u0631"); // Urdu
+ filePathList.add("katta ma'lumotlar"); // Uzbek
+ filePathList.add("d\u1eef li\u1ec7u l\u1edbn"); // Vietnamese
+ filePathList.add("\u05d2\u05e8\u05d5\u05d9\u05e1 \u05d3\u05d0\u05b7\u05d8\u05df"); // Yiddish
+ filePathList.add("big idatha"); // Zulu
+ filePathList.add("rachel\u03c7");
+ filePathList.add("jessica\u03bf");
+ filePathList.add("sarah\u03b4");
+ filePathList.add("katie\u03bd");
+ filePathList.add("wendy\u03be");
+ filePathList.add("david\u03bc");
+ filePathList.add("priscilla\u03c5");
+ filePathList.add("oscar\u03b8");
+ filePathList.add("xavier\u03c7");
+ filePathList.add("gabriella\u03b8");
+ filePathList.add("david\u03c5");
+ filePathList.add("irene\u03bc");
+ filePathList.add("fred\u03c1");
+ filePathList.add("david\u03c4");
+ filePathList.add("ulysses\u03bd");
+ filePathList.add("gabriella\u03bc");
+ filePathList.add("zach\u03b6");
+ filePathList.add("gabriella\u03bb");
+ filePathList.add("ulysses\u03c6");
+ filePathList.add("david\u03c7");
+ filePathList.add("sarah\u03c3");
+ filePathList.add("holly\u03c8");
+ filePathList.add("nick\u03b1");
+ filePathList.add("ulysses\u03b9");
+ filePathList.add("mike\u03b2");
+ filePathList.add("priscilla\u03ba");
+ filePathList.add("wendy\u03b8");
+ filePathList.add("jessica\u03c2");
+ filePathList.add("fred\u03c7");
+ filePathList.add("fred\u03b6");
+ filePathList.add("sarah\u03ba");
+ filePathList.add("calvin\u03b7");
+ filePathList.add("xavier\u03c7");
+ filePathList.add("yuri\u03c7");
+ filePathList.add("ethan\u03bb");
+ filePathList.add("holly\u03b5");
+ filePathList.add("xavier\u03c3");
+ filePathList.add("victor\u03c4");
+ filePathList.add("wendy\u03b2");
+ filePathList.add("jessica\u03c2");
+ filePathList.add("quinn\u03c6");
+ filePathList.add("xavier\u03c5");
+ filePathList.add("nick\u03b9");
+ filePathList.add("rachel\u03c6");
+ filePathList.add("oscar\u03be");
+ filePathList.add("zach\u03b4");
+ filePathList.add("zach\u03bb");
+ filePathList.add("rachel\u03b1");
+ filePathList.add("jessica\u03c6");
+ filePathList.add("luke\u03c6");
+ filePathList.add("tom\u03b6");
+ filePathList.add("nick\u03be");
+ filePathList.add("nick\u03ba");
+ filePathList.add("ethan\u03b4");
+ filePathList.add("fred\u03c7");
+ filePathList.add("priscilla\u03b8");
+ filePathList.add("zach\u03be");
+ filePathList.add("xavier\u03be");
+ filePathList.add("zach\u03c8");
+ filePathList.add("ethan\u03b1");
+ filePathList.add("oscar\u03b9");
+ filePathList.add("irene\u03b4");
+ filePathList.add("irene\u03b6");
+ filePathList.add("victor\u03bf");
+ filePathList.add("wendy\u03b2");
+ filePathList.add("mike\u03c3");
+ filePathList.add("fred\u03bf");
+ filePathList.add("mike\u03b7");
+ filePathList.add("sarah\u03c1");
+ filePathList.add("quinn\u03b2");
+ filePathList.add("mike\u03c5");
+ filePathList.add("nick\u03b6");
+ filePathList.add("nick\u03bf");
+ filePathList.add("tom\u03ba");
+ filePathList.add("bob\u03bb");
+ filePathList.add("yuri\u03c0");
+ filePathList.add("david\u03c4");
+ filePathList.add("quinn\u03c0");
+ filePathList.add("mike\u03bb");
+ filePathList.add("david\u03b7");
+ filePathList.add("ethan\u03c4");
+ filePathList.add("nick\u03c6");
+ filePathList.add("yuri\u03bf");
+ filePathList.add("ethan\u03c5");
+ filePathList.add("bob\u03b8");
+ filePathList.add("david\u03bb");
+ filePathList.add("priscilla\u03be");
+ filePathList.add("nick\u03b3");
+ filePathList.add("luke\u03c5");
+ filePathList.add("irene\u03bb");
+ filePathList.add("xavier\u03bf");
+ filePathList.add("fred\u03c5");
+ filePathList.add("ulysses\u03bc");
+ filePathList.add("wendy\u03b3");
+ filePathList.add("zach\u03bb");
+ filePathList.add("rachel\u03c2");
+ filePathList.add("sarah\u03c0");
+ filePathList.add("alice\u03c8");
+ filePathList.add("bob\u03c4");
+ }
+
+ @AfterClass
+ public static void testReport() throws IOException, URISyntaxException {
+ if (!AdlStorageConfiguration.isContractTestEnabled()) {
+ return;
+ }
+
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+ fs.delete(new Path(TEST_ROOT), true);
+ }
+
+ @Test
+ public void testAllowedSpecialCharactersMkdir()
+ throws IOException, URISyntaxException {
+ Path parentPath = new Path(TEST_ROOT, UUID.randomUUID().toString() + "/");
+ Path specialFile = new Path(parentPath, path);
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+
+ Assert.assertTrue("Mkdir failed : " + specialFile, fs.mkdirs(specialFile));
+ Assert.assertTrue("File not Found after Mkdir success" + specialFile,
+ fs.exists(specialFile));
+ Assert.assertTrue("Not listed under parent " + parentPath,
+ contains(fs.listStatus(parentPath),
+ fs.makeQualified(specialFile).toString()));
+ Assert.assertTrue("Delete failed : " + specialFile,
+ fs.delete(specialFile, true));
+ Assert.assertFalse("File still exist after delete " + specialFile,
+ fs.exists(specialFile));
+ }
+
+ private boolean contains(FileStatus[] statuses, String remotePath) {
+ for (FileStatus status : statuses) {
+ if (status.getPath().toString().equals(remotePath)) {
+ return true;
+ }
+ }
+
+ Arrays.stream(statuses).forEach(s -> LOG.info(s.getPath().toString()));
+ return false;
+ }
+
+ @Before
+ public void setup() throws Exception {
+ org.junit.Assume
+ .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ }
+
+ @Test
+ public void testAllowedSpecialCharactersRename()
+ throws IOException, URISyntaxException {
+
+ String parentPath = TEST_ROOT + UUID.randomUUID().toString() + "/";
+ Path specialFile = new Path(parentPath + path);
+ Path anotherLocation = new Path(parentPath + UUID.randomUUID().toString());
+ FileSystem fs = AdlStorageConfiguration.createStorageConnector();
+
+ Assert.assertTrue("Could not create " + specialFile.toString(),
+ fs.createNewFile(specialFile));
+ Assert.assertTrue(
+ "Failed to rename " + specialFile.toString() + " --> " + anotherLocation
+ .toString(), fs.rename(specialFile, anotherLocation));
+ Assert.assertFalse("File should not be present after successful rename : "
+ + specialFile.toString(), fs.exists(specialFile));
+ Assert.assertTrue("File should be present after successful rename : "
+ + anotherLocation.toString(), fs.exists(anotherLocation));
+ Assert.assertFalse(
+ "Listed under parent whereas expected not listed : " + parentPath,
+ contains(fs.listStatus(new Path(parentPath)),
+ fs.makeQualified(specialFile).toString()));
+
+ Assert.assertTrue(
+ "Failed to rename " + anotherLocation.toString() + " --> " + specialFile
+ .toString(), fs.rename(anotherLocation, specialFile));
+ Assert.assertTrue(
+ "File should be present after successful rename : " + "" + specialFile
+ .toString(), fs.exists(specialFile));
+ Assert.assertFalse("File should not be present after successful rename : "
+ + anotherLocation.toString(), fs.exists(anotherLocation));
+
+ Assert.assertTrue("Not listed under parent " + parentPath,
+ contains(fs.listStatus(new Path(parentPath)),
+ fs.makeQualified(specialFile).toString()));
+
+ Assert.assertTrue("Failed to delete " + parentPath,
+ fs.delete(new Path(parentPath), true));
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4113ec5f/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestMetadata.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestMetadata.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestMetadata.java
new file mode 100644
index 0000000..3b9e7da
--- /dev/null
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestMetadata.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.adl.live;
+
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.UUID;
+
+/**
+ * This class is responsible for testing ContentSummary, ListStatus on
+ * file/folder.
+ */
+public class TestMetadata {
+
+ private FileSystem adlStore;
+ private Path parent;
+
+ public TestMetadata() {
+ parent = new Path("test");
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
+ adlStore = AdlStorageConfiguration.createStorageConnector();
+ }
+
+ @After
+ public void cleanUp() throws Exception {
+ if (AdlStorageConfiguration.isContractTestEnabled()) {
+ adlStore.delete(parent, true);
+ }
+ }
+
+ @Test
+ public void testContentSummaryOnFile() throws IOException {
+ Path child = new Path(UUID.randomUUID().toString());
+ Path testFile = new Path(parent, child);
+ OutputStream out = adlStore.create(testFile);
+
+ for (int i = 0; i < 1024; ++i) {
+ out.write(97);
+ }
+ out.close();
+
+ Assert.assertTrue(adlStore.isFile(testFile));
+ ContentSummary summary = adlStore.getContentSummary(testFile);
+ Assert.assertEquals(1024, summary.getSpaceConsumed());
+ Assert.assertEquals(1, summary.getFileCount());
+ Assert.assertEquals(0, summary.getDirectoryCount());
+ Assert.assertEquals(1024, summary.getLength());
+ }
+
+ @Test
+ public void testContentSummaryOnFolder() throws IOException {
+ Path child = new Path(UUID.randomUUID().toString());
+ Path testFile = new Path(parent, child);
+ OutputStream out = adlStore.create(testFile);
+
+ for (int i = 0; i < 1024; ++i) {
+ out.write(97);
+ }
+ out.close();
+
+ Assert.assertTrue(adlStore.isFile(testFile));
+ ContentSummary summary = adlStore.getContentSummary(parent);
+ Assert.assertEquals(1024, summary.getSpaceConsumed());
+ Assert.assertEquals(1, summary.getFileCount());
+ Assert.assertEquals(1, summary.getDirectoryCount());
+ Assert.assertEquals(1024, summary.getLength());
+ }
+
+ @Test
+ public void listStatusOnFile() throws IOException {
+ Path path = new Path(parent, "a.txt");
+ FileSystem fs = adlStore;
+ fs.createNewFile(path);
+ Assert.assertTrue(fs.isFile(path));
+ FileStatus[] statuses = fs.listStatus(path);
+ Assert
+ .assertEquals(path.makeQualified(fs.getUri(), fs.getWorkingDirectory()),
+ statuses[0].getPath());
+ }
+}
+
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org