Posted to commits@twill.apache.org by ch...@apache.org on 2017/04/08 18:08:34 UTC

[11/24] twill git commit: (TWILL-226) Remove the deprecated HDFSLocationFactory

(TWILL-226) Remove the deprecated HDFSLocationFactory

This closes #44 on Github.

Signed-off-by: Terence Yim <ch...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/twill/repo
Commit: http://git-wip-us.apache.org/repos/asf/twill/commit/7f348716
Tree: http://git-wip-us.apache.org/repos/asf/twill/tree/7f348716
Diff: http://git-wip-us.apache.org/repos/asf/twill/diff/7f348716

Branch: refs/heads/site
Commit: 7f3487169da9639e83f797f38733c8c5d5d4d9f6
Parents: 390dfab
Author: Terence Yim <ch...@apache.org>
Authored: Mon Mar 27 16:14:17 2017 -0700
Committer: Terence Yim <ch...@apache.org>
Committed: Mon Mar 27 16:54:02 2017 -0700

----------------------------------------------------------------------
 .../apache/twill/filesystem/HDFSLocation.java   | 371 -------------------
 .../twill/filesystem/HDFSLocationFactory.java   | 107 ------
 .../apache/twill/internal/yarn/YarnUtils.java   |   6 +-
 .../twill/filesystem/HDFSLocationTest.java      |  31 --
 4 files changed, 1 insertion(+), 514 deletions(-)
----------------------------------------------------------------------
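
For downstream projects that were still constructing the removed factory directly, the deprecation notice on HDFSLocationFactory points at FileContextLocationFactory as the replacement. The sketch below is illustrative only and is not part of this commit: it assumes a Hadoop Configuration is already at hand, that the (Configuration, pathBase) constructor of FileContextLocationFactory is available in the Twill version in use, and the "/twill" base path and MigrationExample class name are placeholders.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.twill.filesystem.FileContextLocationFactory;
    import org.apache.twill.filesystem.Location;
    import org.apache.twill.filesystem.LocationFactory;

    public final class MigrationExample {

      public static Location resolve(Configuration conf, String child) {
        // Previously: new HDFSLocationFactory(FileSystem.get(conf), "/twill")
        // The FileContext-based factory takes the Hadoop configuration and an optional
        // base path instead of a FileSystem instance; "/twill" is a placeholder base.
        LocationFactory factory = new FileContextLocationFactory(conf, "/twill");
        return factory.create(child);
      }
    }

Callers that already obtain locations through the LocationFactory interface are unaffected; only code that instantiated HDFSLocationFactory directly needs a change along these lines.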


http://git-wip-us.apache.org/repos/asf/twill/blob/7f348716/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocation.java
----------------------------------------------------------------------
diff --git a/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocation.java b/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocation.java
deleted file mode 100644
index ef1138e..0000000
--- a/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocation.java
+++ /dev/null
@@ -1,371 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.twill.filesystem;
-
-import com.google.common.base.Objects;
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.FileAlreadyExistsException;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.security.AccessControlException;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.UUID;
-
-/**
- * A concrete implementation of {@link Location} for the HDFS filesystem using {@link FileSystem}.
- */
-final class HDFSLocation implements Location {
-  private final FileSystem fs;
-  private final Path path;
-  private final HDFSLocationFactory locationFactory;
-
-  /**
-   * Constructs a HDFSLocation.
-   *
-   * @param locationFactory The {@link HDFSLocationFactory} that creates this instance.
-   * @param path of the file.
-   */
-  HDFSLocation(HDFSLocationFactory locationFactory, Path path) {
-    this.fs = locationFactory.getFileSystem();
-    this.path = path;
-    this.locationFactory = locationFactory;
-  }
-
-  /**
-   * Checks if this location exists on HDFS.
-   *
-   * @return true if found; false otherwise.
-   * @throws IOException
-   */
-  @Override
-  public boolean exists() throws IOException {
-    return fs.exists(path);
-  }
-
-  /**
-   * @return An {@link InputStream} for this location on HDFS.
-   * @throws IOException
-   */
-  @Override
-  public InputStream getInputStream() throws IOException {
-    return fs.open(path);
-  }
-
-  /**
-   * @return An {@link OutputStream} for this location on HDFS.
-   * @throws IOException
-   */
-  @Override
-  public OutputStream getOutputStream() throws IOException {
-    return fs.create(path);
-  }
-
-  @Override
-  public OutputStream getOutputStream(String permission) throws IOException {
-    FsPermission fsPermission = parsePermissions(permission);
-    OutputStream os = fs.create(path,
-                                fsPermission,
-                                true,
-                                fs.getConf().getInt("io.file.buffer.size", 4096),
-                                fs.getDefaultReplication(path),
-                                fs.getDefaultBlockSize(path),
-                                null);
-    // Set the permission explicitly again to skip the umask
-    fs.setPermission(path, fsPermission);
-    return os;
-  }
-
-  /**
-   * Appends the child to the current {@link Location} on HDFS.
-   * <p>
-   * Returns a new instance of Location.
-   * </p>
-   *
-   * @param child to be appended to this location.
-   * @return A new instance of {@link Location}
-   * @throws IOException
-   */
-  @Override
-  public Location append(String child) throws IOException {
-    if (child.startsWith("/")) {
-      child = child.substring(1);
-    }
-    return new HDFSLocation(locationFactory, new Path(URI.create(path.toUri() + "/" + child)));
-  }
-
-  @Override
-  public Location getTempFile(String suffix) throws IOException {
-    Path path = new Path(
-      URI.create(this.path.toUri() + "." + UUID.randomUUID() + (suffix == null ? TEMP_FILE_SUFFIX : suffix)));
-    return new HDFSLocation(locationFactory, path);
-  }
-
-  /**
-   * @return Returns the name of the file or directory denoted by this abstract pathname.
-   */
-  @Override
-  public String getName() {
-    return path.getName();
-  }
-
-  @Override
-  public boolean createNew() throws IOException {
-    return fs.createNewFile(path);
-  }
-
-  @Override
-  public boolean createNew(String permission) throws IOException {
-    try {
-      FsPermission fsPermission = parsePermissions(permission);
-      fs.create(path, fsPermission, EnumSet.of(CreateFlag.CREATE),
-                fs.getConf().getInt("io.file.buffer.size", 4096),
-                fs.getDefaultReplication(path),
-                fs.getDefaultBlockSize(path),
-                null).close();
-      // Set the permission explicitly again to skip the umask
-      fs.setPermission(path, fsPermission);
-      return true;
-    } catch (FileAlreadyExistsException e) {
-      return false;
-    }
-  }
-
-  @Override
-  public String getOwner() throws IOException {
-    return fs.getFileStatus(path).getOwner();
-  }
-
-  @Override
-  public String getGroup() throws IOException {
-    return fs.getFileStatus(path).getGroup();
-  }
-
-  @Override
-  public void setGroup(String group) throws IOException {
-    fs.setOwner(path, null, group);
-  }
-
-  @Override
-  public String getPermissions() throws IOException {
-    FsPermission permission = fs.getFileStatus(path).getPermission();
-    return permission.getUserAction().SYMBOL + permission.getGroupAction().SYMBOL + permission.getOtherAction().SYMBOL;
-  }
-
-  @Override
-  public void setPermissions(String permission) throws IOException {
-    fs.setPermission(path, parsePermissions(permission));
-  }
-
-  /**
-   * @return A {@link URI} for this location on HDFS.
-   */
-  @Override
-  public URI toURI() {
-    return path.toUri();
-  }
-
-  /**
-   * Deletes the file or directory denoted by this abstract pathname. If this
-   * pathname denotes a directory, then the directory must be empty in order
-   * to be deleted.
-   *
-   * @return true if and only if the file or directory is successfully deleted; false otherwise.
-   */
-  @Override
-  public boolean delete() throws IOException {
-    return fs.delete(path, false);
-  }
-
-  @Override
-  public boolean delete(boolean recursive) throws IOException {
-    return fs.delete(path, true);
-  }
-
-  @Override
-  public Location renameTo(Location destination) throws IOException {
-    // Destination will always be of the same type as this location.
-    if (fs instanceof DistributedFileSystem) {
-      ((DistributedFileSystem) fs).rename(path, ((HDFSLocation) destination).path, Options.Rename.OVERWRITE);
-      return new HDFSLocation(locationFactory, new Path(destination.toURI()));
-    }
-
-    if (fs.rename(path, ((HDFSLocation) destination).path)) {
-      return new HDFSLocation(locationFactory, new Path(destination.toURI()));
-    } else {
-      return null;
-    }
-  }
-
-  @Override
-  public boolean mkdirs() throws IOException {
-    try {
-      if (fs.exists(path)) {
-        return false;
-      }
-      return fs.mkdirs(path);
-    } catch (FileAlreadyExistsException | AccessControlException e) {
-      // curiously, if one of the parent dirs exists but is a file, Hadoop throws this:
-      // org.apache...AccessControlException: Permission denied: user=..., access=EXECUTE, inode=".../existingfile"
-      // however, if the directory itself exists, it will throw FileAlreadyExistsException
-      return false;
-    }
-  }
-
-  @Override
-  public boolean mkdirs(String permission) throws IOException {
-    return mkdirs(path, parsePermissions(permission));
-  }
-
-  /**
-   * Helper to create a directory and its parents if necessary, all with the given permissions.
-   * We cannot use the fs.mkdirs() because that would apply the umask to the permissions.
-   */
-  private boolean mkdirs(Path path, FsPermission permission) throws IOException {
-    try {
-      if (fs.exists(path)) {
-        return false;
-      }
-    } catch (AccessControlException e) {
-      // curiously, if one of the parent dirs exists but is a file, Hadoop throws this:
-      // org.apache...AccessControlException: Permission denied: user=..., access=EXECUTE, inode=".../existingfile"
-      return false;
-    }
-    Path parent = path.getParent();
-    if (null == parent) {
-      return false;
-    }
-    // if parent exists, attempt to create the path as a directory.
-    if (fs.exists(parent)) {
-      return mkdir(path, permission);
-    }
-    // attempt to create the parent with the proper permissions
-    if (!mkdirs(parent, permission) && !fs.isDirectory(parent)) {
-      return false;
-    }
-    // now the parent exists and we can create this directory
-    return mkdir(path, permission);
-  }
-
-  /**
-   * Helper to create a directory (but not its parents) with the given permissions.
-   * We cannot use fs.mkdirs() and then apply the permissions to override the umask.
-   */
-  private boolean mkdir(Path path, FsPermission permission) throws IOException {
-    try {
-      if (!fs.mkdirs(path) && !fs.isDirectory(path)) {
-        return false;
-      }
-    } catch (FileAlreadyExistsException e) {
-      return false;
-    }
-    // explicitly set permissions to get around the umask
-    fs.setPermission(path, permission);
-    return true;
-  }
-
-  /**
-   * @return Length of file.
-   */
-  @Override
-  public long length() throws IOException {
-    return fs.getFileStatus(path).getLen();
-  }
-
-  @Override
-  public long lastModified() throws IOException {
-    return fs.getFileStatus(path).getModificationTime();
-  }
-
-  @Override
-  public boolean isDirectory() throws IOException {
-    return fs.isDirectory(path);
-  }
-
-  @Override
-  public List<Location> list() throws IOException {
-    FileStatus[] statuses = fs.listStatus(path);
-    ImmutableList.Builder<Location> result = ImmutableList.builder();
-    if (statuses != null) {
-      for (FileStatus status : statuses) {
-        if (!Objects.equal(path, status.getPath())) {
-          result.add(new HDFSLocation(locationFactory, status.getPath()));
-        }
-      }
-    }
-    return result.build();
-  }
-
-  @Override
-  public LocationFactory getLocationFactory() {
-    return locationFactory;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) {
-      return true;
-    }
-    if (o == null || getClass() != o.getClass()) {
-      return false;
-    }
-
-    HDFSLocation other = (HDFSLocation) o;
-    return Objects.equal(path, other.path);
-  }
-
-  @Override
-  public int hashCode() {
-    return path.hashCode();
-  }
-
-  @Override
-  public String toString() {
-    return path.toString();
-  }
-
-  /**
-   * Parses the given permission to {@link FsPermission}. Since the {@link HDFSLocationFactory} and this class are
-   * deprecated, this method is copied from {@link FileContextLocation} instead of creating an extra library to share.
-   *
-   * @param permission the permission as passed to the {@link #createNew(String)} or {@link #getOutputStream(String)}
-   *                   methods.
-   * @return a new {@link FsPermission}.
-   */
-  private FsPermission parsePermissions(String permission) {
-    if (permission.length() == 3) {
-      return new FsPermission(permission);
-    } else if (permission.length() == 9) {
-      // FsPermission expects a 10-character string and ignores the first character
-      return FsPermission.valueOf("-" + permission);
-    } else {
-      throw new IllegalArgumentException("Invalid permission " + permission +
-                                           ". Permission should either be a three digit or nine character string.");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/twill/blob/7f348716/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocationFactory.java
----------------------------------------------------------------------
diff --git a/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocationFactory.java b/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocationFactory.java
deleted file mode 100644
index 728de32..0000000
--- a/twill-yarn/src/main/java/org/apache/twill/filesystem/HDFSLocationFactory.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.twill.filesystem;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.Objects;
-
-/**
- * A {@link LocationFactory} that creates HDFS {@link Location} using {@link FileSystem}.
- *
- * @deprecated Deprecated since 0.7.0. Use {@link FileContextLocationFactory} instead.
- */
-@Deprecated
-public final class HDFSLocationFactory implements LocationFactory {
-
-  private final FileSystem fileSystem;
-  private final String pathBase;
-
-  public HDFSLocationFactory(Configuration configuration) {
-    this(getFileSystem(configuration));
-  }
-  
-  public HDFSLocationFactory(Configuration configuration, String pathBase) {
-    this(getFileSystem(configuration), pathBase);
-  }
-
-  public HDFSLocationFactory(FileSystem fileSystem) {
-    this(fileSystem, "/");
-  }
-
-  public HDFSLocationFactory(FileSystem fileSystem, String pathBase) {
-    String base = pathBase.equals("/") ? "" : pathBase;
-    base = base.endsWith("/") ? base.substring(0, base.length() - 1) : base;
-
-    this.fileSystem = fileSystem;
-    this.pathBase = base;
-  }
-
-  @Override
-  public Location create(String path) {
-    if (path.startsWith("/")) {
-      path = path.substring(1);
-    }
-    return new HDFSLocation(this, new Path(fileSystem.getUri() + "/" + pathBase + "/" + path));
-  }
-
-  @Override
-  public Location create(URI uri) {
-    URI fsURI = fileSystem.getUri();
-    if (Objects.equals(fsURI.getScheme(), uri.getScheme())
-      && Objects.equals(fsURI.getAuthority(), uri.getAuthority())) {
-      // It's a full URI
-      return new HDFSLocation(this, new Path(uri));
-    }
-
-    if (uri.isAbsolute()) {
-      // Needs to be of the same scheme
-      Preconditions.checkArgument(Objects.equals(fsURI.getScheme(), uri.getScheme()),
-                                  "Only URI with '%s' scheme is supported", fsURI.getScheme());
-      return new HDFSLocation(this, new Path(fileSystem.getUri() + uri.getPath()));
-    }
-
-    return create(uri.getPath());
-  }
-
-  @Override
-  public Location getHomeLocation() {
-    return new HDFSLocation(this, fileSystem.getHomeDirectory());
-  }
-
-  /**
-   * Returns the underlying {@link FileSystem} object.
-   */
-  public FileSystem getFileSystem() {
-    return fileSystem;
-  }
-
-  private static FileSystem getFileSystem(Configuration configuration) {
-    try {
-      return FileSystem.get(configuration);
-    } catch (IOException e) {
-      throw Throwables.propagate(e);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/twill/blob/7f348716/twill-yarn/src/main/java/org/apache/twill/internal/yarn/YarnUtils.java
----------------------------------------------------------------------
diff --git a/twill-yarn/src/main/java/org/apache/twill/internal/yarn/YarnUtils.java b/twill-yarn/src/main/java/org/apache/twill/internal/yarn/YarnUtils.java
index e931144..3c4c270 100644
--- a/twill-yarn/src/main/java/org/apache/twill/internal/yarn/YarnUtils.java
+++ b/twill-yarn/src/main/java/org/apache/twill/internal/yarn/YarnUtils.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.yarn.util.Records;
 import org.apache.twill.api.LocalFile;
 import org.apache.twill.filesystem.FileContextLocationFactory;
 import org.apache.twill.filesystem.ForwardingLocationFactory;
-import org.apache.twill.filesystem.HDFSLocationFactory;
 import org.apache.twill.filesystem.LocationFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -163,7 +162,7 @@ public class YarnUtils {
     FileSystem fileSystem = getFileSystem(locationFactory, config);
 
     if (fileSystem == null) {
-      LOG.warn("Unexpected: LocationFactory is neither FileContextLocationFactory nor HDFSLocationFactory.");
+      LOG.warn("Unexpected: LocationFactory is not backed by FileContextLocationFactory");
       return ImmutableList.of();
     }
 
@@ -320,9 +319,6 @@ public class YarnUtils {
    */
   @Nullable
   private static FileSystem getFileSystem(LocationFactory locationFactory, Configuration config) throws IOException {
-    if (locationFactory instanceof HDFSLocationFactory) {
-      return ((HDFSLocationFactory) locationFactory).getFileSystem();
-    }
     if (locationFactory instanceof ForwardingLocationFactory) {
       return getFileSystem(((ForwardingLocationFactory) locationFactory).getDelegate(), config);
     }

http://git-wip-us.apache.org/repos/asf/twill/blob/7f348716/twill-yarn/src/test/java/org/apache/twill/filesystem/HDFSLocationTest.java
----------------------------------------------------------------------
diff --git a/twill-yarn/src/test/java/org/apache/twill/filesystem/HDFSLocationTest.java b/twill-yarn/src/test/java/org/apache/twill/filesystem/HDFSLocationTest.java
deleted file mode 100644
index de0b4c5..0000000
--- a/twill-yarn/src/test/java/org/apache/twill/filesystem/HDFSLocationTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.twill.filesystem;
-
-import java.io.IOException;
-
-/**
- * Everything here is set up the same as for FileContextLocation, except that we use an HDFSLocationFactory.
- */
-public class HDFSLocationTest extends FileContextLocationTest {
-
-  @Override
-  protected LocationFactory doCreateLocationFactory(String pathBase) throws IOException {
-    return new HDFSLocationFactory(dfsCluster.getFileSystem(), pathBase);
-  }
-}