You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@commons.apache.org by gg...@apache.org on 2013/01/03 17:37:53 UTC

svn commit: r1428461 - in /commons/proper/vfs/trunk: ./ core/ core/src/main/java/org/apache/commons/vfs2/impl/ core/src/main/java/org/apache/commons/vfs2/provider/hdfs/ core/src/test/java/org/apache/commons/vfs2/provider/hdfs/ core/src/test/java/org/ap...

Author: ggregory
Date: Thu Jan  3 16:37:53 2013
New Revision: 1428461

URL: http://svn.apache.org/viewvc?rev=1428461&view=rev
Log:
[VFS-442] Add an HDFS FileSystem Provider. First commit.

Added:
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java   (with props)
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html   (with props)
    commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/
    commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/
    commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java   (with props)
    commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java   (with props)
Modified:
    commons/proper/vfs/trunk/core/pom.xml
    commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/impl/providers.xml
    commons/proper/vfs/trunk/pom.xml
    commons/proper/vfs/trunk/src/changes/changes.xml
    commons/proper/vfs/trunk/src/site/xdoc/filesystems.xml

Modified: commons/proper/vfs/trunk/core/pom.xml
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/pom.xml?rev=1428461&r1=1428460&r2=1428461&view=diff
==============================================================================
--- commons/proper/vfs/trunk/core/pom.xml (original)
+++ commons/proper/vfs/trunk/core/pom.xml Thu Jan  3 16:37:53 2013
@@ -134,6 +134,22 @@
       <artifactId>jackrabbit-standalone</artifactId>
       <scope>test</scope>
     </dependency>
+    <!-- Test HDFS with Apache Hadoop -->
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-core</artifactId>
+        <scope>provided</scope>
+      </dependency>    
+	  <dependency>
+		<groupId>org.apache.hadoop</groupId>
+		<artifactId>hadoop-test</artifactId>
+		<scope>test</scope>
+	  </dependency>
+	  <dependency>
+		<groupId>javax.ws.rs</groupId>
+		<artifactId>jsr311-api</artifactId>
+		<scope>test</scope>
+ 	  </dependency>
   </dependencies>
 
   <properties>
@@ -325,5 +341,28 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>hdfs</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+        <os>
+          <family>Windows</family>
+        </os>        
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <excludes>
+                <exclude>**/HdfsFileProviderTest.java</exclude>
+                <exclude>**/HdfsFileProviderTestCase.java</exclude>
+              </excludes>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>      
+    </profile>
   </profiles>
 </project>

Modified: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/impl/providers.xml
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/impl/providers.xml?rev=1428461&r1=1428460&r2=1428461&view=diff
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/impl/providers.xml (original)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/impl/providers.xml Thu Jan  3 16:37:53 2013
@@ -103,10 +103,13 @@
         <if-available scheme="bz2"/>
         <if-available scheme="tar"/>
     </provider>
-
     <provider class-name="org.apache.commons.vfs2.provider.ram.RamFileProvider">
         <scheme name="ram"/>
     </provider>
+    <provider class-name="org.apache.commons.vfs2.provider.hdfs.HdfsFileProvider">
+        <scheme name="hdfs"/>
+        <if-available class-name="org.apache.hadoop.fs.FileSystem"/>
+    </provider>
 
     <extension-map extension="zip" scheme="zip"/>
     <extension-map extension="tar" scheme="tar"/>

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+/**
+ * HDFS file content attributes.
+ * <p>
+ * The constant names (as returned by {@code toString()}) are used as the keys
+ * of the attribute map exposed by {@code HdfsFileObject#doGetAttributes()};
+ * renaming a constant is therefore a user-visible change.
+ * 
+ * @since 2.1
+ */
+public enum HdfsFileAttributes
+{
+    /**
+     * Last access time.
+     */
+    LAST_ACCESS_TIME,
+
+    /**
+     * Block size.
+     */
+    BLOCK_SIZE,
+
+    /**
+     * Group.
+     */
+    GROUP,
+
+    /**
+     * Owner.
+     */
+    OWNER,
+
+    /**
+     * Permissions.
+     */
+    PERMISSIONS,
+
+    /**
+     * Length.
+     */
+    LENGTH,
+
+    /**
+     * Modification time.
+     */
+    MODIFICATION_TIME;
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileAttributes.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import org.apache.commons.vfs2.FileContent;
+import org.apache.commons.vfs2.FileContentInfo;
+import org.apache.commons.vfs2.FileContentInfoFactory;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.impl.DefaultFileContentInfo;
+
+/**
+ * Creates FileContentInfo instances for HDFS.
+ * 
+ * @since 2.1
+ */
+public class HdfsFileContentInfoFactory implements FileContentInfoFactory
+{
+    private static final String CONTENT = "text/plain";
+    private static final String ENCODING = "UTF-8";
+
+    /**
+     * Creates a FileContentInfo for the given FileContent.
+     * <p>
+     * NOTE(review): the content type and encoding are hard-coded; every HDFS
+     * file is reported as "text/plain" in "UTF-8" regardless of its actual
+     * content — confirm this is acceptable for binary files.
+     * 
+     * @param fileContent
+     *            Use this FileContent to create a matching FileContentInfo
+     * @return a FileContentInfo for the given FileContent with content set to "text/plain" and encoding set to "UTF-8"
+     * @throws FileSystemException
+     *             when a problem occurs creating the FileContentInfo.
+     */
+    @Override
+    public FileContentInfo create(final FileContent fileContent) throws FileSystemException
+    {
+        return new DefaultFileContentInfo(CONTENT, ENCODING);
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileContentInfoFactory.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,353 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.vfs2.FileNotFolderException;
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileType;
+import org.apache.commons.vfs2.RandomAccessContent;
+import org.apache.commons.vfs2.provider.AbstractFileName;
+import org.apache.commons.vfs2.provider.AbstractFileObject;
+import org.apache.commons.vfs2.util.RandomAccessMode;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * A VFS representation of an HDFS file.
+ * <p>
+ * This implementation is read-only: all mutating operations (rename, write,
+ * set attribute, set last-modified time) throw
+ * {@link UnsupportedOperationException}.
+ * 
+ * @since 2.1
+ */
+public class HdfsFileObject extends AbstractFileObject<HdfsFileSystem>
+{
+    /** Owning VFS file system; used to resolve child objects. */
+    private final HdfsFileSystem fs;
+    /** Underlying Hadoop file system client. */
+    private final FileSystem hdfs;
+    /** Path of this file within HDFS. */
+    private final Path path;
+    /** Cached HDFS status; remains null when the file does not exist. */
+    private FileStatus stat;
+
+    /**
+     * Constructs a new HDFS FileObject
+     * 
+     * @param name
+     *            FileName
+     * @param fs
+     *            HdfsFileSystem instance
+     * @param hdfs
+     *            Hadoop FileSystem instance
+     * @param p
+     *            Path to the file in HDFS
+     */
+    protected HdfsFileObject(final AbstractFileName name, final HdfsFileSystem fs, final FileSystem hdfs, final Path p)
+    {
+        super(name, fs);
+        this.fs = fs;
+        this.hdfs = hdfs;
+        this.path = p;
+    }
+
+    /**
+     * Renaming is not supported.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#canRenameTo(org.apache.commons.vfs2.FileObject)
+     */
+    @Override
+    public boolean canRenameTo(final FileObject newfile)
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Fetches and caches the HDFS FileStatus for this path.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doAttach()
+     */
+    @Override
+    protected void doAttach() throws Exception
+    {
+        try
+        {
+            this.stat = this.hdfs.getFileStatus(this.path);
+        }
+        catch (final FileNotFoundException e)
+        {
+            // Missing file: swallow so callers see an IMAGINARY file via
+            // doGetType(). NOTE(review): a previously cached stat is not reset
+            // to null here, so a file deleted after a successful attach may
+            // still appear to exist until this object is detached — confirm
+            // whether "this.stat = null" is intended before returning.
+            return;
+        }
+    }
+
+    /**
+     * Returns the HDFS attributes keyed by the HdfsFileAttributes constant
+     * names, or the superclass default when the file is not attached/missing.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetAttributes()
+     */
+    @Override
+    protected Map<String, Object> doGetAttributes() throws Exception
+    {
+        if (null == this.stat)
+        {
+            return super.doGetAttributes();
+        }
+        else
+        {
+            final Map<String, Object> attrs = new HashMap<String, Object>();
+            attrs.put(HdfsFileAttributes.LAST_ACCESS_TIME.toString(), this.stat.getAccessTime());
+            attrs.put(HdfsFileAttributes.BLOCK_SIZE.toString(), this.stat.getBlockSize());
+            attrs.put(HdfsFileAttributes.GROUP.toString(), this.stat.getGroup());
+            attrs.put(HdfsFileAttributes.OWNER.toString(), this.stat.getOwner());
+            attrs.put(HdfsFileAttributes.PERMISSIONS.toString(), this.stat.getPermission().toString());
+            attrs.put(HdfsFileAttributes.LENGTH.toString(), this.stat.getLen());
+            attrs.put(HdfsFileAttributes.MODIFICATION_TIME.toString(), this.stat.getModificationTime());
+            return attrs;
+        }
+    }
+
+    /**
+     * NOTE(review): unlike doGetLastModifiedTime(), this does not null-check
+     * "stat" — calling it on an unattached or missing file throws a
+     * NullPointerException. Consider guarding like the other accessors.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetContentSize()
+     */
+    @Override
+    protected long doGetContentSize() throws Exception
+    {
+        return stat.getLen();
+    }
+
+    /**
+     * Opens the HDFS file for reading.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetInputStream()
+     */
+    @Override
+    protected InputStream doGetInputStream() throws Exception
+    {
+        return this.hdfs.open(this.path);
+    }
+
+    /**
+     * Returns the modification time, or -1 when the file is not attached.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetLastModifiedTime()
+     */
+    @Override
+    protected long doGetLastModifiedTime() throws Exception
+    {
+        if (null != this.stat)
+        {
+            return this.stat.getModificationTime();
+        }
+        else
+        {
+            return -1;
+        }
+    }
+
+    /**
+     * Read-only random access; READWRITE mode is rejected.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetRandomAccessContent
+     *      (org.apache.commons.vfs2.util.RandomAccessMode)
+     */
+    @Override
+    protected RandomAccessContent doGetRandomAccessContent(final RandomAccessMode mode) throws Exception
+    {
+        if (mode.equals(RandomAccessMode.READWRITE))
+        {
+            throw new UnsupportedOperationException();
+        }
+        return new HdfsRandomAccessContent(this.path, this.hdfs);
+    }
+
+    /**
+     * Re-attaches and maps the HDFS status to a VFS FileType; a missing file
+     * is reported as IMAGINARY.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doGetType()
+     */
+    @Override
+    protected FileType doGetType() throws Exception
+    {
+        try
+        {
+            doAttach();
+            if (null == stat)
+            {
+                return FileType.IMAGINARY;
+            }
+            if (stat.isDir())
+            {
+                return FileType.FOLDER;
+            }
+            else
+            {
+                return FileType.FILE;
+            }
+        }
+        catch (final FileNotFoundException fnfe)
+        {
+            return FileType.IMAGINARY;
+        }
+    }
+
+    /**
+     * HDFS has no hidden-file concept; always false.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doIsHidden()
+     */
+    @Override
+    protected boolean doIsHidden() throws Exception
+    {
+        return false;
+    }
+
+    /**
+     * NOTE(review): always reports readable without consulting HDFS
+     * permissions — confirm intended.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doIsReadable()
+     */
+    @Override
+    protected boolean doIsReadable() throws Exception
+    {
+        return true;
+    }
+
+    /**
+     * Not supported.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doIsSameFile(org.apache.commons.vfs2.FileObject)
+     */
+    @Override
+    protected boolean doIsSameFile(final FileObject destFile) throws FileSystemException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Always false: this provider is read-only.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doIsWriteable()
+     */
+    @Override
+    protected boolean doIsWriteable() throws Exception
+    {
+        return false;
+    }
+
+    /**
+     * Lists the base names of this folder's children.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doListChildren()
+     */
+    @Override
+    protected String[] doListChildren() throws Exception
+    {
+        if (this.doGetType() != FileType.FOLDER)
+        {
+            throw new FileNotFolderException(this);
+        }
+
+        final FileStatus[] files = this.hdfs.listStatus(this.path);
+        final String[] children = new String[files.length];
+        int i = 0;
+        for (final FileStatus status : files)
+        {
+            children[i++] = status.getPath().getName();
+        }
+        return children;
+    }
+
+    /**
+     * Resolves each child name against this path via the owning file system.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doListChildrenResolved()
+     */
+    @Override
+    protected FileObject[] doListChildrenResolved() throws Exception
+    {
+        if (this.doGetType() != FileType.FOLDER)
+        {
+            return null;
+        }
+        final String[] children = doListChildren();
+        final FileObject[] fo = new FileObject[children.length];
+        for (int i = 0; i < children.length; i++)
+        {
+            final Path p = new Path(this.path, children[i]);
+            fo[i] = this.fs.resolveFile(p.toUri().toString());
+        }
+        return fo;
+    }
+
+    /**
+     * Not supported (read-only provider).
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doRemoveAttribute(java.lang.String)
+     */
+    @Override
+    protected void doRemoveAttribute(final String attrName) throws Exception
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Not supported (read-only provider).
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doSetAttribute(java.lang.String, java.lang.Object)
+     */
+    @Override
+    protected void doSetAttribute(final String attrName, final Object value) throws Exception
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Not supported (read-only provider).
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#doSetLastModifiedTime(long)
+     */
+    @Override
+    protected boolean doSetLastModifiedTime(final long modtime) throws Exception
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * Equality is based solely on the HDFS path.
+     * 
+     * @see java.lang.Object#equals(java.lang.Object)
+     */
+    @Override
+    public boolean equals(final Object o)
+    {
+        if (null == o)
+        {
+            return false;
+        }
+        if (o == this)
+        {
+            return true;
+        }
+        if (o instanceof HdfsFileObject)
+        {
+            final HdfsFileObject other = (HdfsFileObject) o;
+            if (other.path.equals(this.path))
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * NOTE(review): the exception message misspells "existence" as
+     * "existance"; fix the string literal in a follow-up (left untouched here).
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileObject#exists()
+     * @return boolean true if file exists, false if not
+     */
+    @Override
+    public boolean exists() throws FileSystemException
+    {
+        try
+        {
+            doAttach();
+            return this.stat != null;
+        }
+        catch (final FileNotFoundException fne)
+        {
+            return false;
+        }
+        catch (final Exception e)
+        {
+            throw new FileSystemException("Unable to check existance ", e);
+        }
+    }
+
+    /**
+     * NOTE(review): hashes only the final path segment (Path.getName()),
+     * while equals() compares the full path — contract holds (equal objects
+     * hash equally) but same-named files in different directories collide;
+     * this.path.hashCode() would be stronger.
+     * 
+     * @see java.lang.Object#hashCode()
+     */
+    @Override
+    public int hashCode()
+    {
+        return this.path.getName().toString().hashCode();
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileObject.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+
+import org.apache.commons.vfs2.Capability;
+import org.apache.commons.vfs2.FileName;
+import org.apache.commons.vfs2.FileSystem;
+import org.apache.commons.vfs2.FileSystemConfigBuilder;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.provider.AbstractOriginatingFileProvider;
+import org.apache.commons.vfs2.provider.http.HttpFileNameParser;
+
+/**
+ * FileProvider for HDFS files.
+ * <p>
+ * The capability set is read-only (no CREATE/WRITE/DELETE), matching the
+ * read-only HdfsFileObject implementation. The HTTP file name parser is
+ * reused because hdfs:// URLs share the same host[:port]/path authority
+ * syntax.
+ * 
+ * @since 2.1
+ */
+public class HdfsFileProvider extends AbstractOriginatingFileProvider
+{
+    // Shared with HdfsFileSystem.addCapabilities(); wrapped unmodifiable so
+    // the provider's capability set cannot be mutated by callers.
+    protected static final Collection<Capability> CAPABILITIES = Collections.unmodifiableCollection(Arrays
+            .asList(new Capability[]
+            {
+                    Capability.GET_TYPE,
+                    Capability.READ_CONTENT,
+                    Capability.URI,
+                    Capability.GET_LAST_MODIFIED,
+                    Capability.ATTRIBUTES,
+                    Capability.RANDOM_ACCESS_READ,
+                    Capability.DIRECTORY_READ_CONTENT,
+                    Capability.LIST_CHILDREN }));
+
+    /**
+     * Constructs a new HdfsFileProvider
+     */
+    public HdfsFileProvider()
+    {
+        super();
+        this.setFileNameParser(HttpFileNameParser.getInstance());
+    }
+
+    /**
+     * Creates a new HdfsFileSystem for the given root name and options.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractOriginatingFileProvider#
+     *      doCreateFileSystem(org.apache.commons.vfs2.FileName, org.apache.commons.vfs2.FileSystemOptions)
+     */
+    @Override
+    protected FileSystem doCreateFileSystem(final FileName rootName, final FileSystemOptions fileSystemOptions)
+            throws FileSystemException
+    {
+        return new HdfsFileSystem(rootName, fileSystemOptions);
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.provider.FileProvider#getCapabilities()
+     */
+    @Override
+    public Collection<Capability> getCapabilities()
+    {
+        return CAPABILITIES;
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.provider.AbstractFileProvider#getConfigBuilder()
+     */
+    @Override
+    public FileSystemConfigBuilder getConfigBuilder()
+    {
+        return HdfsFileSystemConfigBuilder.getInstance();
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileProvider.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.util.Collection;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.vfs2.Capability;
+import org.apache.commons.vfs2.FileName;
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.provider.AbstractFileName;
+import org.apache.commons.vfs2.provider.AbstractFileSystem;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * A VFS FileSystem that interacts with HDFS.
+ * <p>
+ * The underlying Hadoop FileSystem client is created lazily on the first
+ * call to {@link #resolveFile(FileName)} and closed in {@link #close()}.
+ * 
+ * @since 2.1
+ */
+public class HdfsFileSystem extends AbstractFileSystem
+{
+    private static final Log log = LogFactory.getLog(HdfsFileSystem.class);
+
+    // Lazily initialized Hadoop client; guarded by synchronized(this) in
+    // resolveFile().
+    private FileSystem fs;
+
+    /**
+     * Constructs a new HdfsFileSystem. The Hadoop client is not opened here;
+     * connection happens lazily in resolveFile().
+     * 
+     * @param rootName
+     *            root name of this file system (supplies the hdfs:// URI)
+     * @param fileSystemOptions
+     *            options to apply to this file system
+     */
+    protected HdfsFileSystem(final FileName rootName, final FileSystemOptions fileSystemOptions)
+    {
+        super(rootName, null, fileSystemOptions);
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.provider.AbstractFileSystem#addCapabilities(java .util.Collection)
+     */
+    @Override
+    protected void addCapabilities(final Collection<Capability> capabilities)
+    {
+        capabilities.addAll(HdfsFileProvider.CAPABILITIES);
+    }
+
+    /**
+     * Closes the Hadoop client, then the VFS file system.
+     * <p>
+     * NOTE(review): an IOException from fs.close() is rethrown as an
+     * unchecked RuntimeException and skips super.close(), leaving the VFS
+     * side un-closed — consider logging instead, or closing super in a
+     * finally block.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileSystem#close()
+     */
+    @Override
+    public void close()
+    {
+        try
+        {
+            if (null != fs)
+            {
+                fs.close();
+            }
+        }
+        catch (final IOException e)
+        {
+            throw new RuntimeException("Error closing HDFS client", e);
+        }
+        super.close();
+    }
+
+    /**
+     * Direct file creation is not supported; files are materialized through
+     * resolveFile() instead.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileSystem#createFile(org.apache
+     *      .commons.vfs2.provider.AbstractFileName)
+     */
+    @Override
+    protected FileObject createFile(final AbstractFileName name) throws Exception
+    {
+        throw new FileSystemException("Operation not supported");
+    }
+
+    /**
+     * Lazily connects to HDFS (first call only), then returns the cached
+     * FileObject for the name, creating and caching one if absent.
+     * 
+     * @see org.apache.commons.vfs2.provider.AbstractFileSystem#resolveFile(org.apache .commons.vfs2.FileName)
+     */
+    @Override
+    public FileObject resolveFile(final FileName name) throws FileSystemException
+    {
+
+        synchronized (this)
+        {
+            if (null == this.fs)
+            {
+                final String hdfsUri = name.getRootURI();
+                final Configuration conf = new Configuration(true);
+                conf.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, hdfsUri);
+                // NOTE(review): redundant — this.fs is already null inside
+                // this branch.
+                this.fs = null;
+                try
+                {
+                    fs = org.apache.hadoop.fs.FileSystem.get(conf);
+                }
+                catch (final IOException e)
+                {
+                    log.error("Error connecting to filesystem " + hdfsUri, e);
+                    throw new FileSystemException("Error connecting to filesystem " + hdfsUri, e);
+                }
+            }
+        }
+
+        FileObject file = this.getFileFromCache(name);
+        if (null == file)
+        {
+            String path = null;
+            try
+            {
+                // Decode %-escapes from the URL form; fall back to the raw
+                // path if UTF-8 is unavailable (it never is on a compliant
+                // JVM).
+                path = URLDecoder.decode(name.getPath(), "UTF-8");
+            }
+            catch (final UnsupportedEncodingException e)
+            {
+                path = name.getPath();
+            }
+            final Path filePath = new Path(path);
+            file = new HdfsFileObject((AbstractFileName) name, this, fs, filePath);
+            this.putFileToCache(file);
+            return file;
+        }
+        else
+        {
+            return file;
+        }
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystem.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import org.apache.commons.vfs2.FileSystem;
+import org.apache.commons.vfs2.FileSystemConfigBuilder;
+
+/**
+ * Configuration settings for the {@link HdfsFileSystem}.
+ * <p>
+ * The builder is stateless, so a single shared instance is exposed through {@link #getInstance()}.
+ *
+ * @since 2.1
+ */
+public class HdfsFileSystemConfigBuilder extends FileSystemConfigBuilder
+{
+    /** Singleton instance; safe to share because this builder holds no mutable state. */
+    private static final HdfsFileSystemConfigBuilder BUILDER = new HdfsFileSystemConfigBuilder();
+
+    /**
+     * Gets the singleton builder.
+     *
+     * @return the shared HdfsFileSystemConfigBuilder instance
+     */
+    public static HdfsFileSystemConfigBuilder getInstance()
+    {
+        return BUILDER;
+    }
+
+    /**
+     * @return the FileSystem class this builder configures: {@link HdfsFileSystem}
+     */
+    @Override
+    protected Class<? extends FileSystem> getConfigClass()
+    {
+        return HdfsFileSystem.class;
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsFileSystemConfigBuilder.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,371 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.commons.vfs2.RandomAccessContent;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * Provides random access to content in an HdfsFileObject. Currently this only supports read operations. All write
+ * operations throw an {@link UnsupportedOperationException}.
+ * 
+ * @since 2.1
+ */
+public class HdfsRandomAccessContent implements RandomAccessContent
+{
+    private final FileSystem fs;
+    private final Path path;
+    private final FSDataInputStream fis;
+
+    /**
+     * 
+     * @param path
+     *            A Hadoop Path
+     * @param fs
+     *            A Hadoop FileSystem
+     * @throws IOException
+     *             when the path cannot be processed.
+     */
+    public HdfsRandomAccessContent(final Path path, final FileSystem fs) throws IOException
+    {
+        this.fs = fs;
+        this.path = path;
+        this.fis = this.fs.open(this.path);
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#close()
+     */
+    @Override
+    public void close() throws IOException
+    {
+        this.fis.close();
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#getFilePointer()
+     */
+    @Override
+    public long getFilePointer() throws IOException
+    {
+        return this.fis.getPos();
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#getInputStream()
+     */
+    @Override
+    public InputStream getInputStream() throws IOException
+    {
+        return this.fis;
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#length()
+     */
+    @Override
+    public long length() throws IOException
+    {
+        return this.fs.getFileStatus(this.path).getLen();
+    }
+
+    /**
+     * @see java.io.DataInput#readBoolean()
+     */
+    @Override
+    public boolean readBoolean() throws IOException
+    {
+        return this.fis.readBoolean();
+    }
+
+    /**
+     * @see java.io.DataInput#readByte()
+     */
+    @Override
+    public byte readByte() throws IOException
+    {
+        return this.fis.readByte();
+    }
+
+    /**
+     * @see java.io.DataInput#readChar()
+     */
+    @Override
+    public char readChar() throws IOException
+    {
+        return this.fis.readChar();
+    }
+
+    /**
+     * @see java.io.DataInput#readDouble()
+     */
+    @Override
+    public double readDouble() throws IOException
+    {
+        return this.fis.readDouble();
+    }
+
+    /**
+     * @see java.io.DataInput#readFloat()
+     */
+    @Override
+    public float readFloat() throws IOException
+    {
+        return this.fis.readFloat();
+    }
+
+    /**
+     * @see java.io.DataInput#readFully(byte[])
+     */
+    @Override
+    public void readFully(final byte[] b) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataInput#readFully(byte[], int, int)
+     */
+    @Override
+    public void readFully(final byte[] b, final int off, final int len) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataInput#readInt()
+     */
+    @Override
+    public int readInt() throws IOException
+    {
+        return this.fis.readInt();
+    }
+
+    /**
+     * @see java.io.DataInput#readLine()
+     */
+    @Override
+    @SuppressWarnings("deprecation")
+    public String readLine() throws IOException
+    {
+        return this.fis.readLine();
+    }
+
+    /**
+     * @see java.io.DataInput#readLong()
+     */
+    @Override
+    public long readLong() throws IOException
+    {
+        return this.fis.readLong();
+    }
+
+    /**
+     * @see java.io.DataInput#readShort()
+     */
+    @Override
+    public short readShort() throws IOException
+    {
+        return this.fis.readShort();
+    }
+
+    /**
+     * @see java.io.DataInput#readUnsignedByte()
+     */
+    @Override
+    public int readUnsignedByte() throws IOException
+    {
+        return this.fis.readUnsignedByte();
+    }
+
+    /**
+     * @see java.io.DataInput#readUnsignedShort()
+     */
+    @Override
+    public int readUnsignedShort() throws IOException
+    {
+        return this.fis.readUnsignedShort();
+    }
+
+    /**
+     * @see java.io.DataInput#readUTF()
+     */
+    @Override
+    public String readUTF() throws IOException
+    {
+        return this.fis.readUTF();
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#seek(long)
+     */
+    @Override
+    public void seek(final long pos) throws IOException
+    {
+        this.fis.seek(pos);
+    }
+
+    /**
+     * @see org.apache.commons.vfs2.RandomAccessContent#setLength(long)
+     */
+    @Override
+    public void setLength(final long newLength) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataInput#skipBytes(int)
+     */
+    @Override
+    public int skipBytes(final int n) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#write(byte[])
+     */
+    @Override
+    public void write(final byte[] b) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#write(byte[], int, int)
+     */
+    @Override
+    public void write(final byte[] b, final int off, final int len) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#write(int)
+     */
+    @Override
+    public void write(final int b) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeBoolean(boolean)
+     */
+    @Override
+    public void writeBoolean(final boolean v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeByte(int)
+     */
+    @Override
+    public void writeByte(final int v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeBytes(java.lang.String)
+     */
+    @Override
+    public void writeBytes(final String s) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeChar(int)
+     */
+    @Override
+    public void writeChar(final int v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeChars(java.lang.String)
+     */
+    @Override
+    public void writeChars(final String s) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeDouble(double)
+     */
+    @Override
+    public void writeDouble(final double v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeFloat(float)
+     */
+    @Override
+    public void writeFloat(final float v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeInt(int)
+     */
+    @Override
+    public void writeInt(final int v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeLong(long)
+     */
+    @Override
+    public void writeLong(final long v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeShort(int)
+     */
+    @Override
+    public void writeShort(final int v) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    /**
+     * @see java.io.DataOutput#writeUTF(java.lang.String)
+     */
+    @Override
+    public void writeUTF(final String s) throws IOException
+    {
+        throw new UnsupportedOperationException();
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/HdfsRandomAccessContent.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html (added)
+++ commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html Thu Jan  3 16:37:53 2013
@@ -0,0 +1,19 @@
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<body>
+<p>The HDFS (Hadoop Distributed File System) file provider.</p>
+</body>

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/main/java/org/apache/commons/vfs2/provider/hdfs/package.html
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java (added)
+++ commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,381 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs.test;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Map;
+
+import org.apache.commons.vfs2.CacheStrategy;
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileType;
+import org.apache.commons.vfs2.cache.DefaultFilesCache;
+import org.apache.commons.vfs2.cache.SoftRefFilesCache;
+import org.apache.commons.vfs2.impl.DefaultFileReplicator;
+import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
+import org.apache.commons.vfs2.impl.FileContentInfoFilenameFactory;
+import org.apache.commons.vfs2.provider.hdfs.HdfsFileAttributes;
+import org.apache.commons.vfs2.provider.hdfs.HdfsFileObject;
+import org.apache.commons.vfs2.provider.hdfs.HdfsFileProvider;
+import org.apache.commons.vfs2.util.RandomAccessMode;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * This test class uses the Hadoop MiniDFSCluster class to create an embedded Hadoop cluster. This will only work on
+ * systems that Hadoop supports. This test does not run on Windows because Hadoop does not run on Windows.
+ */
+@SuppressWarnings("resource")
+public class HdfsFileProviderTest
+{
+
+    // Turn off the MiniDFSCluster logging
+    static
+    {
+        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.NoOpLog");
+    }
+
+    // Fixed port for the embedded cluster; TEST_* are hdfs:// URIs resolved through VFS,
+    // while *_PATH are the equivalent raw Hadoop paths used to manipulate HDFS directly.
+    private static final int PORT = 8620;
+    private static final String HDFS_URI = "hdfs://localhost:" + PORT;
+    private static final String TEST_DIR1 = HDFS_URI + "/test-dir";
+    private static final Path DIR1_PATH = new Path("/test-dir");
+    private static final String TEST_FILE1 = TEST_DIR1 + "/accumulo-test-1.jar";
+    private static final Path FILE1_PATH = new Path(DIR1_PATH, "accumulo-test-1.jar");
+
+    // "manager" is the minimal VFS manager the tests resolve files through (built in setUp);
+    // "hdfs" is the raw Hadoop FileSystem handle used to create/delete fixtures directly.
+    private static DefaultFileSystemManager manager;
+    private static FileSystem hdfs;
+
+    protected static Configuration conf;
+    protected static DefaultFileSystemManager vfs;
+    protected static MiniDFSCluster cluster;
+    // This initializer runs after the logging block above: it boots the MiniDFSCluster and
+    // builds a fully-populated VFS manager ("vfs"). Order within this block matters.
+    static
+    {
+        Logger.getRootLogger().setLevel(Level.ERROR);
+
+        // Put the MiniDFSCluster directory in the target directory
+        System.setProperty("test.build.data", "target/build/test/data");
+
+        // Setup HDFS
+        conf = new Configuration();
+        conf.set(FileSystem.FS_DEFAULT_NAME_KEY, HDFS_URI);
+        conf.set("hadoop.security.token.service.use_ip", "true");
+
+        // MiniDFSCluster will check the permissions on the data directories, but does not do a good job of setting them
+        // properly. We need to get the users umask and set the appropriate Hadoop property so that the data directories
+        // will be created with the correct permissions.
+        // NOTE(review): shelling out to /bin/sh makes this POSIX-only — consistent with the
+        // class Javadoc stating the test does not run on Windows.
+        try
+        {
+            final Process p = Runtime.getRuntime().exec("/bin/sh -c umask");
+            final BufferedReader bri = new BufferedReader(new InputStreamReader(p.getInputStream()));
+            final String line = bri.readLine();
+            p.waitFor();
+            // System.out.println("umask response: " + line);
+            final Short umask = Short.parseShort(line.trim(), 8);
+            // Need to set permission to 777 xor umask
+            // leading zero makes java interpret as base 8
+            final int newPermission = 0777 ^ umask;
+            // System.out.println("Umask is: " + String.format("%03o", umask));
+            // System.out.println("Perm is: " + String.format("%03o",
+            // newPermission));
+            conf.set("dfs.datanode.data.dir.perm", String.format("%03o", newPermission));
+        }
+        catch (final Exception e)
+        {
+            throw new RuntimeException("Error getting umask from O/S", e);
+        }
+
+        conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024 * 100); // 100K blocksize
+
+        try
+        {
+            cluster = new MiniDFSCluster(PORT, conf, 1, true, true, true, null, null, null, null);
+            cluster.waitActive();
+        }
+        catch (final IOException e)
+        {
+            throw new RuntimeException("Error setting up mini cluster", e);
+        }
+
+        // Set up the VFS
+        vfs = new DefaultFileSystemManager();
+        try
+        {
+            vfs.setFilesCache(new DefaultFilesCache());
+            vfs.addProvider("res", new org.apache.commons.vfs2.provider.res.ResourceFileProvider());
+            vfs.addProvider("zip", new org.apache.commons.vfs2.provider.zip.ZipFileProvider());
+            vfs.addProvider("gz", new org.apache.commons.vfs2.provider.gzip.GzipFileProvider());
+            vfs.addProvider("ram", new org.apache.commons.vfs2.provider.ram.RamFileProvider());
+            vfs.addProvider("file", new org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider());
+            vfs.addProvider("jar", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("http", new org.apache.commons.vfs2.provider.http.HttpFileProvider());
+            vfs.addProvider("https", new org.apache.commons.vfs2.provider.https.HttpsFileProvider());
+            vfs.addProvider("ftp", new org.apache.commons.vfs2.provider.ftp.FtpFileProvider());
+            vfs.addProvider("ftps", new org.apache.commons.vfs2.provider.ftps.FtpsFileProvider());
+            vfs.addProvider("war", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("par", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("ear", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("sar", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("ejb3", new org.apache.commons.vfs2.provider.jar.JarFileProvider());
+            vfs.addProvider("tmp", new org.apache.commons.vfs2.provider.temp.TemporaryFileProvider());
+            vfs.addProvider("tar", new org.apache.commons.vfs2.provider.tar.TarFileProvider());
+            vfs.addProvider("tbz2", new org.apache.commons.vfs2.provider.tar.TarFileProvider());
+            vfs.addProvider("tgz", new org.apache.commons.vfs2.provider.tar.TarFileProvider());
+            vfs.addProvider("bz2", new org.apache.commons.vfs2.provider.bzip2.Bzip2FileProvider());
+            vfs.addProvider("hdfs", new HdfsFileProvider());
+            vfs.addExtensionMap("jar", "jar");
+            vfs.addExtensionMap("zip", "zip");
+            vfs.addExtensionMap("gz", "gz");
+            vfs.addExtensionMap("tar", "tar");
+            vfs.addExtensionMap("tbz2", "tar");
+            vfs.addExtensionMap("tgz", "tar");
+            vfs.addExtensionMap("bz2", "bz2");
+            vfs.addMimeTypeMap("application/x-tar", "tar");
+            vfs.addMimeTypeMap("application/x-gzip", "gz");
+            vfs.addMimeTypeMap("application/zip", "zip");
+            vfs.setFileContentInfoFactory(new FileContentInfoFilenameFactory());
+            // NOTE(review): setFilesCache was already called with a DefaultFilesCache above;
+            // this call replaces it with a SoftRefFilesCache — confirm both calls are intended.
+            vfs.setFilesCache(new SoftRefFilesCache());
+            vfs.setReplicator(new DefaultFileReplicator());
+            vfs.setCacheStrategy(CacheStrategy.ON_RESOLVE);
+            vfs.init();
+        }
+        catch (final FileSystemException e)
+        {
+            throw new RuntimeException("Error setting up VFS", e);
+        }
+
+    }
+
+    // Builds the minimal manager used by the tests and grabs the cluster's FileSystem handle.
+    @BeforeClass
+    public static void setUp() throws Exception
+    {
+        manager = new DefaultFileSystemManager();
+        manager.addProvider("hdfs", new HdfsFileProvider());
+        manager.init();
+        hdfs = cluster.getFileSystem();
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception
+    {
+        if (null != hdfs)
+        {
+            hdfs.close();
+        }
+        manager.close();
+    }
+
+    // Recursively removes the test directory after each test so tests stay independent.
+    @After
+    public void after() throws Exception
+    {
+        if (null != hdfs)
+        {
+            hdfs.delete(DIR1_PATH, true);
+        }
+    }
+
+    // Creates /test-dir and an empty file inside it via the raw HDFS handle, then resolves
+    // both through VFS and sanity-checks them. Returns the resolved FileObject for the file.
+    private FileObject createTestFile(final FileSystem hdfs) throws IOException
+    {
+        // Create the directory
+        hdfs.mkdirs(DIR1_PATH);
+        final FileObject dir = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(dir);
+        Assert.assertTrue(dir.exists());
+        Assert.assertTrue(dir.getType().equals(FileType.FOLDER));
+
+        // Create the file in the directory
+        hdfs.create(FILE1_PATH).close();
+        final FileObject f = manager.resolveFile(TEST_FILE1);
+        Assert.assertNotNull(f);
+        Assert.assertTrue(f.exists());
+        Assert.assertTrue(f.getType().equals(FileType.FILE));
+        return f;
+    }
+
+    // Rename is not implemented by the HDFS provider in this commit; the call must throw.
+    @Test(expected = UnsupportedOperationException.class)
+    public void testCanRenameTo() throws Exception
+    {
+        final FileObject fo = createTestFile(hdfs);
+        Assert.assertNotNull(fo);
+        fo.canRenameTo(fo);
+    }
+
+    @Test
+    public void testDoListChildren() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        final FileObject dir = file.getParent();
+
+        final FileObject[] children = dir.getChildren();
+        Assert.assertTrue(children.length == 1);
+        Assert.assertTrue(children[0].getName().equals(file.getName()));
+
+    }
+
+    @Test
+    public void testEquals() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        // Get a handle to the same file
+        final FileObject file2 = manager.resolveFile(TEST_FILE1);
+        Assert.assertEquals(file, file2);
+    }
+
+    // Verifies every attribute key exposed by HdfsFileAttributes is present on the content.
+    @Test
+    public void testGetAttributes() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        final Map<String, Object> attributes = file.getContent().getAttributes();
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.BLOCK_SIZE.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.GROUP.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.LAST_ACCESS_TIME.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.LENGTH.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.MODIFICATION_TIME.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.OWNER.toString()));
+        Assert.assertTrue(attributes.containsKey(HdfsFileAttributes.PERMISSIONS.toString()));
+    }
+
+    @Test
+    public void testGetContentSize() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        Assert.assertEquals(0, file.getContent().getSize());
+    }
+
+    @Test
+    public void testGetInputStream() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        file.getContent().getInputStream().close();
+    }
+
+    @Test
+    public void testInit() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_FILE1);
+        Assert.assertNotNull(fo);
+    }
+
+    @Test
+    public void testIsHidden() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        Assert.assertFalse(file.isHidden());
+    }
+
+    @Test
+    public void testIsReadable() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        Assert.assertTrue(file.isReadable());
+    }
+
+    // The provider is read-only in this commit, so even a freshly-created file reports not writeable.
+    @Test
+    public void testIsWritable() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        Assert.assertFalse(file.isWriteable());
+    }
+
+    @Test
+    public void testLastModificationTime() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        Assert.assertFalse(-1 == file.getContent().getLastModifiedTime());
+    }
+
+    // READWRITE random access must be rejected because write support is not implemented.
+    @Test(expected = FileSystemException.class)
+    public void testRandomAccessContent() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        file.getContent().getRandomAccessContent(RandomAccessMode.READWRITE).close();
+    }
+
+    // READ-mode random access is supported and must succeed.
+    @Test
+    public void testRandomAccessContent2() throws Exception
+    {
+        final FileObject fo = manager.resolveFile(TEST_DIR1);
+        Assert.assertNotNull(fo);
+        Assert.assertTrue(fo.exists());
+
+        // Create the test file
+        final FileObject file = createTestFile(hdfs);
+        file.getContent().getRandomAccessContent(RandomAccessMode.READ).close();
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTest.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java?rev=1428461&view=auto
==============================================================================
--- commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java (added)
+++ commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java Thu Jan  3 16:37:53 2013
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.vfs2.provider.hdfs.test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import junit.framework.Test;
+
+import org.apache.commons.AbstractVfsTestCase;
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
+import org.apache.commons.vfs2.provider.hdfs.HdfsFileProvider;
+import org.apache.commons.vfs2.test.AbstractProviderTestConfig;
+import org.apache.commons.vfs2.test.ProviderTestConfig;
+import org.apache.commons.vfs2.test.ProviderTestSuite;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+/**
+ * This test class uses the Hadoop MiniDFSCluster class to create an embedded Hadoop cluster. This will only work on
+ * systems that Hadoop supports. This test does not run on Windows because Hadoop does not run on Windows.
+ */
+public class HdfsFileProviderTestCase extends AbstractProviderTestConfig implements ProviderTestConfig
+{
+    public static class HdfsProviderTestSuite extends ProviderTestSuite
+    {
+
+        // Turn off the MiniDFSCluster logging
+        static
+        {
+            System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.NoOpLog");
+        }
+
+        public HdfsProviderTestSuite(final ProviderTestConfig providerConfig, final boolean addEmptyDir) throws Exception
+        {
+            super(providerConfig, addEmptyDir);
+        }
+
+        @SuppressWarnings("deprecation")
+        private void copyTestResources(final File directory, final Path parent) throws Exception
+        {
+            for (final File file : directory.listFiles())
+            {
+                if (file.isFile())
+                {
+                    final Path src = new Path(file.getAbsolutePath());
+                    final Path dst = new Path(parent, file.getName());
+                    hdfs.copyFromLocalFile(src, dst);
+                }
+                else if (file.isDirectory())
+                {
+                    final Path dir = new Path(parent, file.getName());
+                    if (hdfs.mkdirs(dir))
+                    {
+                        copyTestResources(file, dir);
+                    }
+                    else
+                    {
+                        fail("Unable to make directory: " + dir);
+                    }
+                }
+            }
+
+        }
+
+        @SuppressWarnings("deprecation")
+        @Override
+        protected void setUp() throws Exception
+        {
+            Logger.getRootLogger().setLevel(Level.OFF);
+
+            // Put the MiniDFSCluster directory in the target directory
+            System.setProperty("test.build.data", "target/build/test2/data");
+
+            // Setup HDFS
+            conf = new Configuration();
+            conf.set(FileSystem.FS_DEFAULT_NAME_KEY, HDFS_URI);
+            conf.set("hadoop.security.token.service.use_ip", "true");
+
+            // MiniDFSCluster will check the permissions on the data directories, but does not set them
+            // properly itself. We need to get the user's umask and set the appropriate Hadoop property so
+            // that the data directories are created with the correct permissions.
+            try
+            {
+                final Process p = Runtime.getRuntime().exec("/bin/sh -c umask");
+                final BufferedReader bri = new BufferedReader(new InputStreamReader(p.getInputStream()));
+                final String line = bri.readLine();
+                p.waitFor();
+                // System.out.println("umask response: " + line);
+                final Short umask = Short.parseShort(line.trim(), 8);
+                // Need to set permission to 777 xor umask
+                // leading zero makes java interpret as base 8
+                final int newPermission = 0777 ^ umask;
+                conf.set("dfs.datanode.data.dir.perm", String.format("%03o", newPermission));
+            }
+            catch (final Exception e)
+            {
+                throw new RuntimeException("Error getting umask from O/S", e);
+            }
+
+            conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024 * 100); // 100K blocksize
+
+            try
+            {
+                cluster = new MiniDFSCluster(PORT, conf, 1, true, true, true, null, null, null, null);
+                cluster.waitActive();
+            }
+            catch (final IOException e)
+            {
+                throw new RuntimeException("Error setting up mini cluster", e);
+            }
+            hdfs = cluster.getFileSystem();
+
+            // Copy the test directory into HDFS
+            final Path base = new Path("/test-data");
+            assertTrue("Unable to create base directory", hdfs.mkdirs(base));
+            final File testDir = AbstractVfsTestCase.getTestDirectory();
+            copyTestResources(testDir, base);
+
+            super.setUp();
+        }
+
+        @Override
+        protected void tearDown() throws Exception
+        {
+            super.tearDown();
+            if (null != hdfs)
+            {
+                hdfs.close();
+            }
+        }
+    }
+    private static final int PORT = 8720;
+    private static final String HDFS_URI = "hdfs://localhost:" + PORT;
+    private static FileSystem hdfs;
+    private static Configuration conf;
+
+    private static MiniDFSCluster cluster;
+
+    /**
+     * Creates the test suite for the HDFS file system.
+     */
+    public static Test suite() throws Exception
+    {
+        return new HdfsProviderTestSuite(new HdfsFileProviderTestCase(), false);
+    }
+
+    /**
+     * Returns the base folder for read tests.
+     */
+    @Override
+    public FileObject getBaseTestFolder(final FileSystemManager manager) throws Exception
+    {
+        final String uri = HDFS_URI + "/test-data";
+        return manager.resolveFile(uri);
+    }
+
+    /**
+     * Prepares the file system manager.
+     */
+    @Override
+    public void prepare(final DefaultFileSystemManager manager) throws Exception
+    {
+        manager.addProvider("hdfs", new HdfsFileProvider());
+    }
+
+}

Propchange: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: commons/proper/vfs/trunk/core/src/test/java/org/apache/commons/vfs2/provider/hdfs/test/HdfsFileProviderTestCase.java
------------------------------------------------------------------------------
    svn:keywords = Id

Modified: commons/proper/vfs/trunk/pom.xml
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/pom.xml?rev=1428461&r1=1428460&r2=1428461&view=diff
==============================================================================
--- commons/proper/vfs/trunk/pom.xml (original)
+++ commons/proper/vfs/trunk/pom.xml Thu Jan  3 16:37:53 2013
@@ -443,6 +443,25 @@
           </exclusion>
         </exclusions>
       </dependency>
+	  <!-- HDFS dependencies -->
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-core</artifactId>
+        <version>1.1.0</version>
+        <scope>provided</scope>
+      </dependency>    
+	  <dependency>
+		<groupId>org.apache.hadoop</groupId>
+		<artifactId>hadoop-test</artifactId>
+		<version>1.1.0</version>
+		<scope>test</scope>
+	  </dependency>
+	  <dependency>
+		<groupId>javax.ws.rs</groupId>
+		<artifactId>jsr311-api</artifactId>
+		<version>1.0</version>
+		<scope>test</scope>
+ 	  </dependency>
     </dependencies>
   </dependencyManagement>
 

Modified: commons/proper/vfs/trunk/src/changes/changes.xml
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/src/changes/changes.xml?rev=1428461&r1=1428460&r2=1428461&view=diff
==============================================================================
--- commons/proper/vfs/trunk/src/changes/changes.xml (original)
+++ commons/proper/vfs/trunk/src/changes/changes.xml Thu Jan  3 16:37:53 2013
@@ -26,6 +26,9 @@
 <!--       <action issue="VFS-443" dev="ggregory" type="update" due-to="nickallen"> -->
 <!--     	[Local] Need an easy way to convert from a FileObject to a File. -->
 <!--       </action> -->
+      <action issue="VFS-442" dev="ggregory" type="add" due-to="dlmarion">
+        Add an HDFS FileSystem Provider.
+      </action>
       <action issue="VFS-448" dev="ggregory" type="fix">
         commons-vfs 2.0 JAR has flawed OSGi MANIFEST.MF.
       </action>

Modified: commons/proper/vfs/trunk/src/site/xdoc/filesystems.xml
URL: http://svn.apache.org/viewvc/commons/proper/vfs/trunk/src/site/xdoc/filesystems.xml?rev=1428461&r1=1428460&r2=1428461&view=diff
==============================================================================
--- commons/proper/vfs/trunk/src/site/xdoc/filesystems.xml (original)
+++ commons/proper/vfs/trunk/src/site/xdoc/filesystems.xml Thu Jan  3 16:37:53 2013
@@ -93,6 +93,17 @@
               <td>No</td>
             </tr>
             <tr>
+              <td><a href="#HDFS">HDFS</a></td>
+              <td>Yes</td>
+              <td>No</td>
+              <td>Yes</td>
+              <td>No</td>
+              <td>No</td>
+              <td>Read</td>
+              <td>No</td>
+              <td>No</td>
+            </tr>
+            <tr>
               <td><a href="#HTTP and HTTPS">HTTP</a></td>
               <td>Yes</td>
               <td>Yes</td>
@@ -452,6 +463,35 @@
 
         </section>
 
+        <section name="HDFS">
+
+            <p>Provides access to files on an Apache Hadoop File System (HDFS).</p>
+
+            <p>
+                <b>URI Format</b>
+            </p>
+
+            <p>
+                <code>hdfs://
+                    <i>hostname</i>[:
+                    <i>port</i>][
+                    <i>absolute-path</i>]
+                </code>
+            </p>
+
+            <p>
+                <b>Examples</b>
+            </p>
+            <ul>
+                <li>
+                    <code>hdfs://somehost:8080/downloads/some_dir</code>
+                </li>
+                <li>
+                    <code>hdfs://somehost:8080/downloads/some_file.ext</code>
+                </li>
+            </ul>
+        </section>
+
         <section name="HTTP and HTTPS">
 
             <p>Provides access to files on an HTTP server.</p>