Posted to commits@jackrabbit.apache.org by mr...@apache.org on 2012/08/30 09:27:48 UTC

svn commit: r1378830 - in /jackrabbit/sandbox/jackrabbit-hadoop/src/main: java/org/apache/jackrabbit/hadoop/fs/ java/org/apache/jackrabbit/hadoop/spi/ resources/ resources/org/ resources/org/apache/ resources/org/apache/jackrabbit/ resources/org/apache...

Author: mreutegg
Date: Thu Aug 30 07:27:48 2012
New Revision: 1378830

URL: http://svn.apache.org/viewvc?rev=1378830&view=rev
Log:
- Jackrabbit FileSystem implementation on top of HDFS
- SPI implementation on HDFS, exposing files and folders in HDFS as nt:file and nt:folder nodes in JCR
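
For reference, a minimal usage sketch (not part of this commit) of the HadoopFileSystem added below. The namenode URI and the class name HadoopFileSystemDemo are illustrative assumptions, and error handling is reduced to the bare minimum:

    import java.io.OutputStream;

    import org.apache.jackrabbit.hadoop.fs.HadoopFileSystem;

    public class HadoopFileSystemDemo {
        public static void main(String[] args) throws Exception {
            HadoopFileSystem hfs = new HadoopFileSystem();
            // assumed namenode address; the constructor default happens to be the same
            hfs.setFs_default_name("hdfs://localhost:9000");
            hfs.init();
            try {
                hfs.createFolder("/repository");
                OutputStream out = hfs.getOutputStream("/repository/hello.txt");
                try {
                    out.write("hello".getBytes("UTF-8"));
                } finally {
                    out.close();
                }
                // lists the names of the entries below /repository, e.g. "hello.txt"
                for (String name : hfs.list("/repository")) {
                    System.out.println(name);
                }
            } finally {
                hfs.close();
            }
        }
    }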

Added:
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties   (with props)
    jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd   (with props)

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,255 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.fs;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+
+import org.apache.jackrabbit.core.fs.FileSystemException;
+import org.apache.jackrabbit.core.fs.RandomAccessOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * <code>HadoopFileSystem</code> is a Jackrabbit file system implementation backed by HDFS.
+ */
+public class HadoopFileSystem implements org.apache.jackrabbit.core.fs.FileSystem {
+
+    private FileSystem fs;
+
+    /**
+     * The hadoop file system configuration.
+     */
+    private Configuration config = new Configuration();
+
+    public HadoopFileSystem() {
+        setFs_default_name("hdfs://localhost:9000");
+    }
+
+    public void init() throws FileSystemException {
+        try {
+            fs = FileSystem.get(config);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void close() throws FileSystemException {
+        try {
+            fs.close();
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public InputStream getInputStream(String filePath) throws FileSystemException {
+        try {
+            return fs.open(new Path(filePath));
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public OutputStream getOutputStream(String filePath) throws FileSystemException {
+        try {
+            return fs.create(new Path(filePath));
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public RandomAccessOutputStream getRandomAccessOutputStream(String filePath)
+            throws FileSystemException, UnsupportedOperationException {
+        throw new UnsupportedOperationException();
+    }
+
+    public void createFolder(String folderPath) throws FileSystemException {
+        try {
+            fs.mkdirs(new Path(folderPath));
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public boolean exists(String path) throws FileSystemException {
+        try {
+            return fs.exists(new Path(path));
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public boolean isFile(String path) throws FileSystemException {
+        Path p = new Path(path);
+        try {
+            return fs.exists(p) && !fs.getFileStatus(p).isDir();
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public boolean isFolder(String path) throws FileSystemException {
+        Path p = new Path(path);
+        try {
+            return fs.exists(p) && fs.getFileStatus(p).isDir();
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public boolean hasChildren(String path) throws FileSystemException {
+        Path p = new Path(path);
+        try {
+            return fs.getFileStatus(p).isDir() && fs.listStatus(p).length > 0;
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public long length(String filePath) throws FileSystemException {
+        try {
+            return fs.getFileStatus(new Path(filePath)).getLen();
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public long lastModified(String path) throws FileSystemException {
+        try {
+            return fs.getFileStatus(new Path(path)).getModificationTime();
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void touch(String filePath) throws FileSystemException {
+        try {
+            fs.setTimes(new Path(filePath), System.currentTimeMillis(), -1);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public String[] list(String folderPath) throws FileSystemException {
+        if (!isFolder(folderPath)) {
+            throw new FileSystemException(folderPath + " is not a folder");
+        }
+        try {
+            List<String> names = new ArrayList<String>();
+            for (FileStatus status : fs.listStatus(new Path(folderPath))) {
+                names.add(status.getPath().getName());
+            }
+            return names.toArray(new String[names.size()]);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public String[] listFiles(String folderPath) throws FileSystemException {
+        if (!isFolder(folderPath)) {
+            throw new FileSystemException(folderPath + " is not a folder");
+        }
+        try {
+            List<String> names = new ArrayList<String>();
+            for (FileStatus status : fs.listStatus(new Path(folderPath))) {
+                if (!status.isDir()) {
+                    names.add(status.getPath().getName());
+                }
+            }
+            return names.toArray(new String[names.size()]);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public String[] listFolders(String folderPath) throws FileSystemException {
+        if (!isFolder(folderPath)) {
+            throw new FileSystemException(folderPath + " is not a folder");
+        }
+        try {
+            List<String> names = new ArrayList<String>();
+            for (FileStatus status : fs.listStatus(new Path(folderPath))) {
+                if (status.isDir()) {
+                    names.add(status.getPath().getName());
+                }
+            }
+            return names.toArray(new String[names.size()]);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void deleteFile(String filePath) throws FileSystemException {
+        if (isFolder(filePath)) {
+            throw new FileSystemException(filePath + " is not a file");
+        }
+        try {
+            fs.delete(new Path(filePath), false);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void deleteFolder(String folderPath) throws FileSystemException {
+        if (isFile(folderPath)) {
+            throw new FileSystemException(folderPath + " is not a folder");
+        }
+        try {
+            fs.delete(new Path(folderPath), true);
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void move(String srcPath, String destPath) throws FileSystemException {
+        try {
+            fs.rename(new Path(srcPath), new Path(destPath));
+        } catch (IOException e) {
+            throw new FileSystemException(e.getMessage(), e);
+        }
+    }
+
+    public void copy(String srcPath, String destPath) throws FileSystemException {
+        // TODO: implement
+        throw new UnsupportedOperationException();
+    }
+
+    //-------------------------------< properties >-----------------------------
+
+    /**
+     * Get the location where this data store keeps the files.
+     *
+     * @return the hadoop configuration property: fs.default.name
+     */
+    public String getFs_default_name() {
+        return config.get("fs.default.name");
+    }
+
+    /**
+     * Set the location where this data store keeps the files.
+     *
+     * @param name the hadoop configuration property: fs.default.name
+     */
+    public void setFs_default_name(String name) {
+        config.set("fs.default.name", name);
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/fs/HadoopFileSystem.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.spi;
+
+import java.util.Iterator;
+import java.util.Collections;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.jackrabbit.spi.commons.NodeInfoImpl;
+import org.apache.jackrabbit.spi.PropertyId;
+import org.apache.jackrabbit.spi.PropertyInfo;
+import org.apache.jackrabbit.spi.IdFactory;
+import org.apache.jackrabbit.spi.PathFactory;
+import org.apache.jackrabbit.spi.QValueFactory;
+import org.apache.jackrabbit.spi.NodeId;
+import org.apache.jackrabbit.spi.Name;
+import org.apache.hadoop.fs.FileStatus;
+
+/**
+ * <code>AbstractNodeInfo</code> is the common base class for node infos backed by an HDFS <code>FileStatus</code>.
+ */
+public class AbstractNodeInfo extends NodeInfoImpl {
+
+    protected static final Iterator EMPTY = Collections.EMPTY_LIST.iterator();
+
+    protected final Map<PropertyId, PropertyInfo> propertyInfos = new HashMap<PropertyId, PropertyInfo>();
+
+    protected final FileStatus status;
+
+    protected final IdFactory idFactory;
+
+    protected final PathFactory pathFactory;
+
+    protected final QValueFactory valueFactory;
+
+    public AbstractNodeInfo(NodeId id, Name primaryTypeName,
+                            Iterator childInfos, IdFactory idFactory,
+                            PathFactory pathFactory, QValueFactory valueFactory,
+                            FileStatus status) {
+        super(id.getPath(), id, 1, primaryTypeName, new Name[0],
+                EMPTY, EMPTY, childInfos);
+        this.status = status;
+        this.idFactory = idFactory;
+        this.pathFactory = pathFactory;
+        this.valueFactory = valueFactory;
+    }
+
+    public Iterator getPropertyIds() {
+        return propertyInfos.keySet().iterator();
+    }
+
+    public PropertyInfo getPropertyInfo(PropertyId id) {
+        return propertyInfos.get(id);
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/AbstractNodeInfo.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.spi;
+
+import java.util.Iterator;
+import java.util.Collections;
+import java.util.Set;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Calendar;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.PropertyType;
+
+import org.apache.jackrabbit.spi.commons.NodeInfoImpl;
+import org.apache.jackrabbit.spi.commons.ChildInfoImpl;
+import org.apache.jackrabbit.spi.commons.PropertyInfoImpl;
+import org.apache.jackrabbit.spi.commons.name.NameConstants;
+import org.apache.jackrabbit.spi.Path;
+import org.apache.jackrabbit.spi.NodeId;
+import org.apache.jackrabbit.spi.Name;
+import org.apache.jackrabbit.spi.PropertyId;
+import org.apache.jackrabbit.spi.PropertyInfo;
+import org.apache.jackrabbit.spi.ChildInfo;
+import org.apache.jackrabbit.spi.IdFactory;
+import org.apache.jackrabbit.spi.PathFactory;
+import org.apache.jackrabbit.spi.QValue;
+import org.apache.jackrabbit.spi.QValueFactory;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+
+/**
+ * <code>FileInfo</code> exposes an HDFS file as an <code>nt:file</code> node.
+ */
+public class FileInfo extends AbstractNodeInfo {
+
+    private static final ChildInfo JCR_CONTENT = new ChildInfoImpl(NameConstants.JCR_CONTENT, null, 1);
+
+
+    public FileInfo(NodeId id, FileStatus status, IdFactory idFactory,
+                    PathFactory pathFactory, QValueFactory valueFactory)
+            throws RepositoryException {
+        super(id, NameConstants.NT_FILE, Collections.singleton(JCR_CONTENT).iterator(),
+                idFactory, pathFactory, valueFactory, status);
+        // jcr:primaryType
+        PropertyId propId = idFactory.createPropertyId(getId(), NameConstants.JCR_PRIMARYTYPE);
+        QValue value = valueFactory.create(getNodetype());
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.NAME, false, new QValue[]{value}));
+        // jcr:created
+        propId = idFactory.createPropertyId(getId(), NameConstants.JCR_CREATED);
+        Calendar cal = Calendar.getInstance();
+        cal.setTimeInMillis(0);
+        value = valueFactory.create(cal);
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.DATE, false, new QValue[]{value}));
+    }
+
+    public ResourceInfo getResourceInfo(FileSystem fs)
+            throws RepositoryException {
+        NodeId id = idFactory.createNodeId(getId(), pathFactory.create(NameConstants.JCR_CONTENT));
+        return new ResourceInfo(id, status, fs, idFactory, pathFactory, valueFactory);
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FileInfo.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.spi;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Calendar;
+
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.spi.commons.name.NameConstants;
+import org.apache.jackrabbit.spi.commons.ChildInfoImpl;
+import org.apache.jackrabbit.spi.commons.PropertyInfoImpl;
+import org.apache.jackrabbit.spi.NodeId;
+import org.apache.jackrabbit.spi.IdFactory;
+import org.apache.jackrabbit.spi.PathFactory;
+import org.apache.jackrabbit.spi.QValueFactory;
+import org.apache.jackrabbit.spi.ChildInfo;
+import org.apache.jackrabbit.spi.Name;
+import org.apache.jackrabbit.spi.PropertyId;
+import org.apache.jackrabbit.spi.QValue;
+import org.apache.hadoop.fs.FileStatus;
+
+/**
+ * <code>FolderInfo</code> exposes an HDFS directory as an <code>nt:folder</code> node.
+ */
+public class FolderInfo extends AbstractNodeInfo {
+
+    public FolderInfo(NodeId id, IdFactory idFactory, PathFactory pathFactory,
+                      QValueFactory valueFactory, FileStatus status,
+                      Name[] childNames) throws RepositoryException {
+        super(id, NameConstants.NT_FOLDER, getChildInfos(childNames),
+                idFactory, pathFactory, valueFactory, status);
+        // jcr:primaryType
+        PropertyId propId = idFactory.createPropertyId(getId(), NameConstants.JCR_PRIMARYTYPE);
+        QValue value = valueFactory.create(getNodetype());
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.NAME, false, new QValue[]{value}));
+        // jcr:created
+        propId = idFactory.createPropertyId(getId(), NameConstants.JCR_CREATED);
+        Calendar cal = Calendar.getInstance();
+        cal.setTimeInMillis(0);
+        value = valueFactory.create(cal);
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.DATE, false, new QValue[]{value}));
+    }
+
+    private static Iterator getChildInfos(Name[] childNames) {
+        List<ChildInfo> infos = new ArrayList<ChildInfo>();
+        for (Name n : childNames) {
+            infos.add(new ChildInfoImpl(n, null, 1));
+        }
+        return infos.iterator();
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/FolderInfo.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.spi;
+
+import java.util.Map;
+import java.util.Iterator;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.Collections;
+import java.util.List;
+import java.util.ArrayList;
+import java.io.Reader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.UnsupportedEncodingException;
+import java.io.FileNotFoundException;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.Credentials;
+import javax.jcr.LoginException;
+import javax.jcr.ItemNotFoundException;
+
+import org.apache.jackrabbit.spi.commons.AbstractReadableRepositoryService;
+import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver;
+import org.apache.jackrabbit.spi.commons.conversion.DefaultNamePathResolver;
+import org.apache.jackrabbit.spi.commons.name.NameConstants;
+import org.apache.jackrabbit.spi.commons.nodetype.compact.ParseException;
+import org.apache.jackrabbit.spi.QNodeDefinition;
+import org.apache.jackrabbit.spi.SessionInfo;
+import org.apache.jackrabbit.spi.NodeInfo;
+import org.apache.jackrabbit.spi.NodeId;
+import org.apache.jackrabbit.spi.PropertyInfo;
+import org.apache.jackrabbit.spi.PropertyId;
+import org.apache.jackrabbit.spi.QNodeTypeDefinition;
+import org.apache.jackrabbit.spi.Name;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * <code>HadoopRepositoryService</code> is a read-only SPI repository service that exposes the content of an HDFS file system.
+ */
+public class HadoopRepositoryService extends AbstractReadableRepositoryService {
+
+    private static final Map DESCRIPTORS;
+
+    static {
+        InputStream in = HadoopRepositoryService.class.getResourceAsStream("descriptor.properties");
+        try {
+            Properties props = new Properties();
+            props.load(in);
+            DESCRIPTORS = Collections.unmodifiableMap(props);
+        } catch (IOException e) {
+            throw (InternalError) new InternalError().initCause(e);
+        } finally {
+            try {
+                in.close();
+            } catch (IOException e) {
+                throw (InternalError) new InternalError().initCause(e);
+            }
+        }
+    }
+
+    private final FileSystem fs;
+
+    /**
+     * The hadoop file system configuration.
+     */
+    private final Configuration config = new Configuration();
+
+    private final NamePathResolver resolver;
+
+    public HadoopRepositoryService(String fs_default_name)
+            throws RepositoryException, ParseException {
+        super(DESCRIPTORS, new HashMap(), createCNDReader(),
+                Collections.singletonList("default"));
+        try {
+            config.set("fs.default.name", fs_default_name);
+            this.fs = FileSystem.get(config);
+        } catch (IOException e) {
+            throw new RepositoryException(e);
+        }
+        this.resolver = new DefaultNamePathResolver(namespaces);
+    }
+
+    protected static Reader createCNDReader() {
+        try {
+            return new InputStreamReader(
+                    HadoopRepositoryService.class.getResourceAsStream(
+                            "files-and-folders.cnd"), "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            // UTF-8 is always supported
+            throw new InternalError();
+        }
+    }
+
+    protected QNodeDefinition createRootNodeDefinition()
+            throws RepositoryException {
+        QNodeTypeDefinition ntDef = (QNodeTypeDefinition) nodeTypeDefs.get(NameConstants.NT_FOLDER);
+        return ntDef.getChildNodeDefs()[0];
+    }
+
+    protected void checkCredentials(Credentials credentials, String workspaceName)
+            throws LoginException {
+        // accept all credentials
+    }
+
+    public NodeInfo getNodeInfo(SessionInfo sessionInfo, NodeId nodeId) throws
+            ItemNotFoundException, RepositoryException {
+        Path p;
+        boolean isResource;
+        if (nodeId.getPath().getNameElement().getName().equals(NameConstants.JCR_CONTENT)) {
+            isResource = true;
+            p = new Path(resolver.getJCRPath(nodeId.getPath().getAncestor(1)));
+        } else {
+            isResource = false;
+            p = new Path(resolver.getJCRPath(nodeId.getPath()));
+        }
+        try {
+            FileStatus status = fs.getFileStatus(p);
+            if (status.isDir()) {
+                // folder
+                List<Name> childNames = new ArrayList<Name>();
+                for (FileStatus s : fs.listStatus(p)) {
+                    childNames.add(resolver.getQName(s.getPath().getName()));
+                }
+                return new FolderInfo(nodeId, getIdFactory(), getPathFactory(),
+                        getQValueFactory(), status,
+                        childNames.toArray(new Name[childNames.size()]));
+            } else {
+                // file or resource
+                FileInfo info = new FileInfo(nodeId, status, getIdFactory(),
+                        getPathFactory(), getQValueFactory());
+                if (isResource) {
+                    return info.getResourceInfo(fs);
+                } else {
+                    return info;
+                }
+            }
+        } catch (FileNotFoundException e) {
+            throw new ItemNotFoundException(e);
+        } catch (IOException e) {
+            throw new RepositoryException(e);
+        }
+    }
+
+    public Iterator getChildInfos(SessionInfo sessionInfo, NodeId parentId)
+            throws ItemNotFoundException, RepositoryException {
+        return getNodeInfo(sessionInfo, parentId).getChildInfos();
+    }
+
+    public PropertyInfo getPropertyInfo(SessionInfo sessionInfo, PropertyId propertyId)
+            throws ItemNotFoundException, RepositoryException {
+        return ((AbstractNodeInfo) getNodeInfo(sessionInfo, propertyId.getParentId())).getPropertyInfo(propertyId);
+    }
+
+    public void dispose() {
+        try {
+            fs.close();
+        } catch (IOException e) {
+            // TODO: log
+        }
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/HadoopRepositoryService.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL
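
To illustrate the SPI side, a hedged sketch (not part of this commit) of browsing HDFS through the HadoopRepositoryService added above. It assumes the obtain() and getRootId() methods of the SPI RepositoryService contract are available through AbstractReadableRepositoryService; the namenode URI and the class name RepositoryBrowseDemo are illustrative only:

    import java.util.Iterator;

    import javax.jcr.SimpleCredentials;

    import org.apache.jackrabbit.hadoop.spi.HadoopRepositoryService;
    import org.apache.jackrabbit.spi.ChildInfo;
    import org.apache.jackrabbit.spi.NodeId;
    import org.apache.jackrabbit.spi.SessionInfo;

    public class RepositoryBrowseDemo {
        public static void main(String[] args) throws Exception {
            HadoopRepositoryService service =
                    new HadoopRepositoryService("hdfs://localhost:9000"); // assumed namenode
            try {
                // checkCredentials() accepts anything; "default" is the only workspace
                SessionInfo session = service.obtain(
                        new SimpleCredentials("anonymous", new char[0]), "default");
                NodeId root = service.getRootId(session);
                // the children of the HDFS root appear as nt:file / nt:folder child entries
                Iterator children = service.getChildInfos(session, root);
                while (children.hasNext()) {
                    ChildInfo child = (ChildInfo) children.next();
                    System.out.println(child.getName());
                }
            } finally {
                service.dispose();
            }
        }
    }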

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java Thu Aug 30 07:27:48 2012
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.hadoop.spi;
+
+import java.util.Calendar;
+import java.net.URLConnection;
+import java.io.InputStream;
+import java.io.IOException;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.PropertyType;
+
+import org.apache.jackrabbit.spi.commons.PropertyInfoImpl;
+import org.apache.jackrabbit.spi.commons.name.NameConstants;
+import org.apache.jackrabbit.spi.NodeId;
+import org.apache.jackrabbit.spi.IdFactory;
+import org.apache.jackrabbit.spi.PathFactory;
+import org.apache.jackrabbit.spi.QValueFactory;
+import org.apache.jackrabbit.spi.PropertyId;
+import org.apache.jackrabbit.spi.QValue;
+import org.apache.jackrabbit.spi.Name;
+import org.apache.jackrabbit.spi.Path;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+
+/**
+ * <code>ResourceInfo</code> exposes the content of an HDFS file as the <code>nt:resource</code> child of an <code>nt:file</code> node.
+ */
+public class ResourceInfo extends AbstractNodeInfo {
+
+    private final FileSystem fs;
+
+    public ResourceInfo(NodeId id, FileStatus status, FileSystem fs,
+                        IdFactory idFactory, PathFactory pathFactory,
+                        QValueFactory valueFactory) throws RepositoryException {
+        super(id, NameConstants.NT_RESOURCE, EMPTY, idFactory,
+                pathFactory, valueFactory, status);
+        this.fs = fs;
+        // jcr:primaryType
+        PropertyId propId = idFactory.createPropertyId(getId(), NameConstants.JCR_PRIMARYTYPE);
+        QValue value = valueFactory.create(getNodetype());
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.NAME, false, new QValue[]{value}));
+        // jcr:lastModified
+        propId = idFactory.createPropertyId(getId(), NameConstants.JCR_LASTMODIFIED);
+        Calendar cal = Calendar.getInstance();
+        cal.setTimeInMillis(status.getModificationTime());
+        value = valueFactory.create(cal);
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.DATE, false, new QValue[]{value}));
+        // jcr:mimeType
+        propId = idFactory.createPropertyId(getId(), NameConstants.JCR_MIMETYPE);
+        String mimeType = URLConnection.guessContentTypeFromName(status.getPath().getName());
+        if (mimeType == null) {
+            mimeType = "application/octet-stream";
+        }
+        value = valueFactory.create(mimeType, PropertyType.STRING);
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.STRING, false, new QValue[]{value}));
+        // jcr:data
+        propId = idFactory.createPropertyId(getId(), NameConstants.JCR_DATA);
+        propertyInfos.put(propId, new PropertyInfoImpl(propId.getPath(), propId, PropertyType.BINARY, false, new QValue[]{new BinaryQValue()}));
+    }
+
+    private class BinaryQValue implements QValue {
+
+        public int getType() {
+            return PropertyType.BINARY;
+        }
+
+        public long getLength() throws RepositoryException {
+            return status.getLen();
+        }
+
+        public String getString() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public InputStream getStream() throws RepositoryException {
+            try {
+                return fs.open(status.getPath());
+            } catch (IOException e) {
+                throw new RepositoryException(e);
+            }
+        }
+
+        public Calendar getCalendar() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public double getDouble() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public long getLong() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public boolean getBoolean() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public Name getName() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public Path getPath() throws RepositoryException {
+            throw new UnsupportedOperationException();
+        }
+
+        public void discard() {
+        }
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/java/org/apache/jackrabbit/hadoop/spi/ResourceInfo.java
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision Rev URL

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties Thu Aug 30 07:27:48 2012
@@ -0,0 +1,38 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+################################################################################
+# Repository Version and Implementation Information
+
+jcr.specification.version = 1.0
+jcr.specification.name = Content Repository API for Java(TM) Technology Specification
+jcr.repository.vendor = Apache Software Foundation
+jcr.repository.vendor.url = http://jackrabbit.apache.org/
+jcr.repository.name = JCR2HDFS
+jcr.repository.version = ${pom.version}
+
+################################################################################
+# Repository features and support information
+
+level.1.supported = false
+level.2.supported = false
+option.transactions.supported = false
+option.versioning.supported = false
+option.observation.supported = false
+option.locking.supported = false
+option.query.sql.supported = false
+query.xpath.pos.index = false
+query.xpath.doc.order = false

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/descriptor.properties
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd?rev=1378830&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd (added)
+++ jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd Thu Aug 30 07:27:48 2012
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<jcr='http://www.jcp.org/jcr/1.0'>
+<nt='http://www.jcp.org/jcr/nt/1.0'>
+<mix='http://www.jcp.org/jcr/mix/1.0'>
+
+//------------------------------------------------------------------------------
+// B A S E  T Y P E S
+//------------------------------------------------------------------------------
+
+[nt:base]
+  - jcr:primaryType (name) mandatory autocreated protected compute
+  - jcr:mixinTypes (name) protected multiple compute
+
+[nt:hierarchyNode]
+  - jcr:created (date) autocreated protected initialize
+
+[nt:folder] > nt:hierarchyNode
+  + * (nt:hierarchyNode) version
+
+[nt:file] > nt:hierarchyNode
+  + jcr:content (nt:base) primary mandatory
+
+[nt:resource] > mix:referenceable
+  - jcr:encoding (string)
+  - jcr:mimeType (string) mandatory
+  - jcr:data (binary) primary mandatory
+  - jcr:lastModified (date) mandatory ignore

Propchange: jackrabbit/sandbox/jackrabbit-hadoop/src/main/resources/org/apache/jackrabbit/hadoop/spi/files-and-folders.cnd
------------------------------------------------------------------------------
    svn:eol-style = native