Posted to commits@jackrabbit.apache.org by ju...@apache.org on 2008/09/03 17:29:09 UTC

svn commit: r691635 [2/2] - in /jackrabbit/sandbox/jackrabbit-amazon: ./ applications/ applications/test/ applications/test/repository/ applications/test/repository/namespaces/ applications/test/repository/nodetypes/ applications/test/workspaces/ appli...

Added: jackrabbit/sandbox/jackrabbit-amazon/patches-for-1.3/jackrabbit-jcr-commons.support-sysprops-in-config-files-dirty.patch
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/patches-for-1.3/jackrabbit-jcr-commons.support-sysprops-in-config-files-dirty.patch?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/patches-for-1.3/jackrabbit-jcr-commons.support-sysprops-in-config-files-dirty.patch (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/patches-for-1.3/jackrabbit-jcr-commons.support-sysprops-in-config-files-dirty.patch Wed Sep  3 08:29:07 2008
@@ -0,0 +1,26 @@
+Index: src/main/java/org/apache/jackrabbit/util/Text.java
+===================================================================
+--- src/main/java/org/apache/jackrabbit/util/Text.java	(revision 609460)
++++ src/main/java/org/apache/jackrabbit/util/Text.java	(working copy)
+@@ -718,12 +718,15 @@
+                 String variable = value.substring(p + 2, q);
+                 String replacement = variables.getProperty(variable);
+                 if (replacement == null) {
+-                    if (ignoreMissing) {
+-                        replacement = "";
+-                    } else {
+-                        throw new IllegalArgumentException(
+-                                "Replacement not found for ${" + variable + "}.");
+-                    }
++                	replacement = System.getProperty(variable);
++                	if (replacement == null) {
++	                    if (ignoreMissing) {
++	                        replacement = "";
++	                    } else {
++	                        throw new IllegalArgumentException(
++	                                "Replacement not found for ${" + variable + "}.");
++	                    }
++                	}
+                 }
+                 result.append(replacement);
+                 p = q + 1;

Propchange: jackrabbit/sandbox/jackrabbit-amazon/patches-for-1.3/jackrabbit-jcr-commons.support-sysprops-in-config-files-dirty.patch
------------------------------------------------------------------------------
    svn:eol-style = native
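
The patch above makes the ${...} placeholder substitution in jackrabbit-jcr-commons
fall back to Java system properties before giving up. Assuming the surrounding method
is the usual public static Text.replaceVariables(Properties, String, boolean) helper
(as the hunk suggests), a minimal illustration with a hypothetical property name:

    // requires: import java.util.Properties;
    //           import org.apache.jackrabbit.util.Text;
    Properties vars = new Properties();
    vars.setProperty("wsp.name", "default");
    String raw = "${wsp.name}/store-${aws.secretKey}";
    // unpatched: IllegalArgumentException, because aws.secretKey is not in vars
    //            and ignoreMissing is false
    // patched:   ${aws.secretKey} is resolved via System.getProperty("aws.secretKey"),
    //            e.g. when the JVM was started with -Daws.secretKey=topsecret
    String resolved = Text.replaceVariables(vars, raw, false);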

Added: jackrabbit/sandbox/jackrabbit-amazon/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/pom.xml?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/pom.xml (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/pom.xml Wed Sep  3 08:29:07 2008
@@ -0,0 +1,260 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                             http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+<!-- ====================================================================== -->
+<!-- P R O J E C T  D E S C R I P T I O N                                   -->
+<!-- ====================================================================== -->
+  <parent>
+    <groupId>org.apache.jackrabbit</groupId>
+    <artifactId>jackrabbit</artifactId>
+    <version>1.3.3</version>
+  </parent>
+  <artifactId>jackrabbit-amazon</artifactId>
+  <name>Jackrabbit persistence implementations that connect to Amazon Web Services</name>
+
+  <!--<scm>
+    <connection>
+      scm:svn:http://svn.apache.org/repos/asf/jackrabbit/trunk/jackrabbit-spi2jcr
+    </connection>
+    <developerConnection>
+      scm:svn:https://svn.apache.org/repos/asf/jackrabbit/trunk/jackrabbit-spi2jcr
+    </developerConnection>
+    <url>http://svn.apache.org/viewvc/jackrabbit/trunk/jackrabbit-spi2jcr</url>
+  </scm>-->
+
+  <build>
+    <resources>
+      <resource>
+        <directory>${basedir}/src/main/java</directory>
+        <includes>
+          <include>**/*.properties</include>
+        </includes>
+      </resource>
+    </resources>
+    <testResources>
+      <testResource>
+        <directory>${basedir}/applications/test</directory>
+        <includes>
+          <include>*.properties</include>
+          <include>*.xml</include>
+        </includes>
+      </testResource>
+    </testResources>
+    <plugins>
+      <plugin>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>delete-sources</id>
+            <phase>process-sources</phase>
+            <configuration>
+              <tasks>
+                <ant
+                  antfile="${basedir}/build.xml"
+                  inheritRefs="true">
+                  <target name="deleteSources" />
+                </ant>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>process-test-resources</id>
+            <phase>process-test-resources</phase>
+            <configuration>
+              <tasks>
+                <ant
+                  antfile="${basedir}/build.xml"
+                  inheritRefs="true">
+                  <target name="prepareTestRepository" />
+                </ant>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>clean</id>
+            <phase>clean</phase>
+            <configuration>
+              <tasks>
+                <ant
+                  antfile="${basedir}/build.xml"
+                  inheritRefs="true">
+                  <target name="cleanTestData" />
+                </ant>
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+        <dependencies>
+          <dependency>
+            <groupId>ant</groupId>
+            <artifactId>ant-optional</artifactId>
+            <version>1.5.3-1</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>**</exclude>
+          </excludes>
+          <forkMode>once</forkMode>
+          <argLine>-Xmx128m -enableassertions</argLine>
+          <systemProperties>
+            <property>
+              <name>derby.system.durability</name>
+              <value>test</value>
+            </property>
+            <!--<property>
+              <name>known.issues</name>
+              <value>
+                org.apache.jackrabbit.core.xml.DocumentViewTest#testMultiValue
+                org.apache.jackrabbit.value.BinaryValueTest#testBinaryValueEquals
+              </value>
+            </property>-->
+          </systemProperties>
+        </configuration>
+        <executions>
+          <execution>
+            <id>do_init</id>
+            <configuration>
+              <includes>
+                <include>**/init/TestAll.java</include>
+              </includes>
+              <excludes>
+                <exclude>**/tck/*.java</exclude>
+              </excludes>
+            </configuration>
+            <goals>
+              <goal>test</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>do_test</id>
+            <configuration>
+              <excludes>
+                <exclude>**/init/*</exclude>
+              </excludes>
+              <includes>
+                <include>**/tck/TestAll.java</include>
+              </includes>
+            </configuration>
+            <goals>
+              <goal>test</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.jackrabbit</groupId>
+      <artifactId>jackrabbit-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>javax.jcr</groupId>
+      <artifactId>jcr</artifactId>
+    </dependency>
+    
+    <!-- S3 connection library -->
+    <dependency>
+      <groupId>net.java.dev.jets3t</groupId>
+      <artifactId>jets3t</artifactId>
+      <version>0.5.0</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>3.0.1</version>
+    </dependency>
+    <!-- for SOAP in jets3t -->
+    <!-- <dependency>
+	  <groupId>javax.xml</groupId>
+	  <artifactId>jaxrpc-api</artifactId>
+	  <version>1.1</version>
+	</dependency>
+	<dependency>
+      <groupId>javax.activation</groupId>
+	  <artifactId>activation</artifactId>
+	  <version>1.1</version>
+	</dependency>
+	<dependency>
+	    <groupId>org.apache.axis</groupId>
+	    <artifactId>axis</artifactId>
+	    <version>1.4</version>
+	</dependency>	
+	<dependency>
+	    <groupId>commons-discovery</groupId>
+	    <artifactId>commons-discovery</artifactId>
+	    <version>0.4</version>
+	</dependency>
+	<dependency>
+	    <groupId>javax.xml.soap</groupId>
+	    <artifactId>saaj-api</artifactId>
+	    <version>1.3</version>
+	</dependency>
+	<dependency>
+	    <groupId>wsdl4j</groupId>
+	    <artifactId>wsdl4j</artifactId>
+	    <version>1.5.1</version>
+	</dependency> -->
+	
+    <!-- testing -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.jackrabbit</groupId>
+      <artifactId>jackrabbit-jcr-tests</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.jackrabbit</groupId>
+      <artifactId>jackrabbit-core</artifactId>
+      <version>1.3.3</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>

Propchange: jackrabbit/sandbox/jackrabbit-amazon/pom.xml
------------------------------------------------------------------------------
    svn:eol-style = native
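
A note on running the build: the surefire configuration above excludes everything in
the default test run and instead defines two executions, do_init (running
**/init/TestAll.java) and do_test (running **/tck/TestAll.java). Because the
persistence managers in this module fall back to the aws.accessKey and aws.secretKey
system properties when no credentials are set in repository.xml (see the classes
below), a test run presumably needs -Daws.accessKey=... and -Daws.secretKey=...
(real AWS credentials) on the Maven command line.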

Added: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/fs/amazon/AmazonS3FileSystem.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/fs/amazon/AmazonS3FileSystem.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/fs/amazon/AmazonS3FileSystem.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/fs/amazon/AmazonS3FileSystem.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,316 @@
+package org.apache.jackrabbit.fs.amazon;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.jackrabbit.core.fs.FileSystem;
+import org.apache.jackrabbit.core.fs.FileSystemException;
+import org.apache.jackrabbit.core.fs.RandomAccessOutputStream;
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.impl.rest.httpclient.RestS3Service;
+import org.jets3t.service.model.S3Bucket;
+import org.jets3t.service.security.AWSCredentials;
+
+/**
+ * <b>NOTE: this class is not fully implemented.</b>
+ * 
+ * <p>
+ * <code>AmazonS3FileSystem</code> is a Jackrabbit file system
+ * that stores the data in <a href="http://www.amazon.com/s3">Amazon S3</a>.
+ * All you need is an S3 account (which costs money, depending on size
+ * of the stored data and the used upload/download bandwidth).
+ * 
+ * <p>
+ * Note that the performance might be disappointing, as Jackrabbit makes
+ * many fine-grained calls to the file system and each one results in
+ * an HTTP request to S3, which can be slow.
+ * </p>
+ * 
+ * <h4>Configuration parameters:</h4>
+ * 
+ * <p>
+ * To configure this file system, you will have to provide your Amazon
+ * AWS credentials, namely access key and secret key, the bucket name to choose
+ * as well as the prefix for the object names, typically containing workspace
+ * names.
+ * </p>
+ * 
+ * <dl>
+ * <dt>accessKey</dt>
+ * <dd>Amazon AWS access key (aka account user id) [required]</dd>
+ * 
+ * <dt>secretKey</dt>
+ * <dd>Amazon AWS secret key (aka account password) [required]</dd>
+ * 
+ * <dt>bucketName</dt>
+ * <dd>Name of the S3 bucket to use [optional, default uses accessKey] Note
+ * that bucket names are global, so using your accessKey is recommended to
+ * prevent conflicts with other AWS users.</dd>
+ * 
+ * <dt>objectPrefix</dt>
+ * <dd>Prefix used for all object names [optional, default is ""] Should
+ * include the workspace name ("<code>${wsp.name}.fs</code>" or "<code>version.fs</code>"
+ * for the versioning PM) to put multiple workspaces into one bucket.
+ * Make sure FileSystem and PersistenceManager for a workspace have separate
+ * prefixes (if both are AmazonS3).</dd>
+ * </dl>
+ * 
+ * <h4>Example XML Config:</h4>
+ * 
+ * <pre>
+ * &lt;FileSystem class=&quot;org.apache.jackrabbit.fs.amazon.AmazonS3FileSystem&quot;&gt;
+ *     &lt;param name=&quot;accessKey&quot;    value=&quot;abcde01234&quot;/&gt;
+ *     &lt;param name=&quot;secretKey&quot;    value=&quot;topsecret&quot;/&gt;
+ *     &lt;param name=&quot;bucketName&quot;   value=&quot;abcde01234.jcrstore&quot;/&gt;
+ *     &lt;param name=&quot;objectPrefix&quot; value=&quot;${wsp.name}.fs/&quot;/&gt;
+ * &lt;/FileSystem&gt;
+ * </pre>
+ * 
+ * <h4>Technical implementation - Mapping File System to S3:</h4>
+ * 
+ * Files, Folders onto Objects
+ *
+ * TODO
+ * 
+ * @author <a href="mailto:alexander dot klimetschek at googlemail dot com">
+ *         Alexander Klimetschek</a>
+ * 
+ */
+public class AmazonS3FileSystem implements FileSystem {
+
+	public static String USER_AGENT_APPLICATION = "Jackrabbit/1.3 (Amazon S3 File System)";
+	
+	protected String accessKey;
+	protected String secretKey;
+	protected String bucketName;
+	protected String objectPrefix;
+	
+	protected S3Service s3service;
+	protected S3Bucket bucket;
+
+	/**
+	 * Sets the Amazon access key of the AWS account (aka the user id) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param accessKey the plain access key string
+	 */
+	public void setAccessKey(String accessKey) {
+		this.accessKey = accessKey;
+	}
+
+	public String getAccessKey() {
+		return accessKey;
+	}
+
+	/**
+	 * Sets the Amazon secret key of the AWS account (aka the password) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param secretKey the plain secret key string
+	 */
+	public void setSecretKey(String secretKey) {
+		this.secretKey = secretKey;
+	}
+
+	public String getSecretKey() {
+		return secretKey;
+	}
+
+	/**
+	 * The bucket in which all data from this persistence manager will be
+	 * stored.
+	 * 
+	 * <p>
+	 * <b>Please note that buckets are global and you are advised to use you own
+	 * AWS access key as part of the bucket name to avoid conflicts!</b> For
+	 * example, use <code>AWS-KEY.jcrstore</code>, where AWS-KEY is replaced
+	 * by your own AWS access key. Also note that the number of buckets is by
+	 * default limited to 100 per AWS account.
+	 * </p>
+	 * 
+	 * <p>
+	 * If not specified, the default value will be your access key.
+	 * </p>
+	 * 
+	 * @param bucket
+	 *            a bucket name inside which this PM will store its data
+	 */
+	public void setBucket(String bucket) {
+		this.bucketName = bucket;
+	}
+
+	public String getBucket() {
+		return bucketName;
+	}
+
+	/**
+	 * This objectPrefix is applied to all objects that get created inside
+	 * the bucket and allows multiple workspaces to be kept in a single
+	 * bucket. Simply use "<code>${wsp.name}</code>" in the repository.xml
+	 * as a placeholder for the workspace name, or use e.g.
+	 * "<code>version</code>" in the version persistence manager config.
+	 * 
+	 * <p>
+	 * It is recommended to end the prefix with a slash (e.g.
+	 * "<code>${wsp.name}/</code>") for better compatibility
+	 * with other tools, which will then see each workspace as a
+	 * separate directory.
+	 * </p>
+	 *  
+	 * @param objectPrefix
+	 */
+	public void setObjectPrefix(String objectPrefix) {
+		this.objectPrefix = objectPrefix;
+	}
+
+	public String getObjectPrefix() {
+		return objectPrefix;
+	}
+
+	//--------------------------------------------------------< lifecycle >
+
+	public void init() throws FileSystemException {
+		if (accessKey == null) {
+			throw new IllegalArgumentException("No accessKey specified for " +
+					this.getClass().getName());
+		}
+		if (secretKey == null) {
+			throw new IllegalArgumentException("No secretKey specified for " +
+					this.getClass().getName());
+		}
+		// set the default value for baseBucket
+		if (bucketName == null) {
+			bucketName = accessKey;
+		}
+		// ensure empty string
+		if (objectPrefix == null) {
+			objectPrefix = "";
+		}
+		
+		AWSCredentials credentials = new AWSCredentials(accessKey, secretKey);
+		try {
+			s3service = new RestS3Service(credentials, USER_AGENT_APPLICATION, null);
+			
+			if (!s3service.isBucketAccessible(bucketName)) {
+				bucket = s3service.createBucket(bucketName);
+			} else {
+				bucket = new S3Bucket(bucketName);
+			}
+		} catch (S3ServiceException e) {
+			throw new FileSystemException("Could not init " + this.getClass().getName() +
+					" with accessKey='" + accessKey + "', bucketName='" + bucketName + "' " +
+					"and objectPrefix='" + objectPrefix + "'", e);
+		}
+	}
+
+	public void close() throws FileSystemException {
+	}
+
+	//--------------------------------------------------------< exists >
+
+	public boolean exists(String path) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return false;
+	}
+
+	public boolean isFile(String path) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return false;
+	}
+
+	public boolean isFolder(String path) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return false;
+	}
+
+	public boolean hasChildren(String path) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return false;
+	}
+
+	//--------------------------------------------------------< read >
+
+	public long lastModified(String path) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return 0;
+	}
+
+	public long length(String filePath) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return 0;
+	}
+
+	public InputStream getInputStream(String filePath)
+			throws FileSystemException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	public String[] list(String folderPath) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	public String[] listFiles(String folderPath) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	public String[] listFolders(String folderPath) throws FileSystemException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	//--------------------------------------------------------< write >
+
+	public void createFolder(String folderPath) throws FileSystemException {
+		// TODO Auto-generated method stub
+
+	}
+
+	public void touch(String filePath) throws FileSystemException {
+		// TODO Auto-generated method stub
+
+	}
+
+	public OutputStream getOutputStream(String filePath)
+			throws FileSystemException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+	
+	public RandomAccessOutputStream getRandomAccessOutputStream(String filePath)
+			throws FileSystemException, UnsupportedOperationException {
+        throw new UnsupportedOperationException(
+        	"Random access is not implemented for the Amazon S3 file system");
+	}
+	
+	//--------------------------------------------------------< delete >
+
+	public void deleteFile(String filePath) throws FileSystemException {
+		// TODO Auto-generated method stub
+
+	}
+
+	public void deleteFolder(String folderPath) throws FileSystemException {
+		// TODO Auto-generated method stub
+
+	}
+
+	//--------------------------------------------------------< copy/move >
+
+	public void copy(String srcPath, String destPath)
+			throws FileSystemException {
+		// TODO Auto-generated method stub
+
+	}
+
+	public void move(String srcPath, String destPath)
+			throws FileSystemException {
+		// TODO Auto-generated method stub
+	
+	}
+
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/fs/amazon/AmazonS3FileSystem.java
------------------------------------------------------------------------------
    svn:eol-style = native
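
The FileSystem methods above are still TODO stubs. For orientation only, a minimal
sketch of how exists(String) could look, reusing the same jets3t prefix-listing idiom
as containsObject() in the bundle persistence manager below; the key layout
objectPrefix + path and the extra import of org.jets3t.service.model.S3Object are
assumptions of this sketch, not something the commit defines:

    public boolean exists(String path) throws FileSystemException {
        String key = objectPrefix + path; // assumed key layout
        try {
            // listObjects() matches by prefix, so look for an exact key match
            S3Object[] objects = s3service.listObjects(bucket, key, null);
            for (int i = 0; i < objects.length; i++) {
                if (objects[i].getKey().equals(key)) {
                    return true;
                }
            }
            return false;
        } catch (S3ServiceException e) {
            throw new FileSystemException("Cannot check existence of '" + path
                    + "' in bucket '" + bucketName + "'", e);
        }
    }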

Added: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3BundlePersistenceManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3BundlePersistenceManager.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3BundlePersistenceManager.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3BundlePersistenceManager.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,550 @@
+package org.apache.jackrabbit.persistence.amazon;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.InputStream;
+import java.util.Collection;
+import java.util.Iterator;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.NodeId;
+import org.apache.jackrabbit.core.PropertyId;
+import org.apache.jackrabbit.core.persistence.PMContext;
+import org.apache.jackrabbit.core.persistence.bundle.AbstractBundlePersistenceManager;
+import org.apache.jackrabbit.core.persistence.bundle.util.BundleBinding;
+import org.apache.jackrabbit.core.persistence.bundle.util.ErrorHandling;
+import org.apache.jackrabbit.core.persistence.bundle.util.NodePropBundle;
+import org.apache.jackrabbit.core.persistence.util.BLOBStore;
+import org.apache.jackrabbit.core.state.ItemStateException;
+import org.apache.jackrabbit.core.state.NoSuchItemStateException;
+import org.apache.jackrabbit.core.state.NodeReferences;
+import org.apache.jackrabbit.core.state.NodeReferencesId;
+import org.apache.jackrabbit.persistence.util.AmazonS3Exception;
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.impl.rest.httpclient.RestS3Service;
+import org.jets3t.service.model.S3Bucket;
+import org.jets3t.service.model.S3Object;
+import org.jets3t.service.security.AWSCredentials;
+
+/**
+ * <code>AmazonS3BundlePersistenceManager</code> is a Jackrabbit persistence manager
+ * that stores the data in <a href="http://www.amazon.com/s3">Amazon S3</a>.
+ * All you need is an S3 account (which costs money, depending on size of the
+ * stored data and the used upload/download bandwidth).
+ * 
+ * <p>
+ * Bundles improve performance compared with a normal persistence
+ * manager that stores nodes and their properties separately, and bundle
+ * caching improves it further. Still, the performance might be
+ * disappointing, as Jackrabbit's calls are relatively fine-grained and
+ * each HTTP round trip to S3 adds noticeable latency.
+ * </p>
+ * 
+ * <h4>Configuration parameters:</h4>
+ * 
+ * <p>
+ * To configure this persistence manager, you will have to provide your Amazon
+ * AWS credentials, namely access key and secret key, the bucket name to choose
+ * as well as the prefix for the object names, typically containing workspace
+ * names.
+ * </p>
+ * 
+ * <dl>
+ * <dt>accessKey</dt>
+ * <dd>Amazon AWS access key (aka account user id) [required]</dd>
+ * 
+ * <dt>secretKey</dt>
+ * <dd>Amazon AWS secret key (aka account password) [required]</dd>
+ * 
+ * <dt>bucket</dt>
+ * <dd>Name of the S3 bucket to use [optional, default uses accessKey] Note
+ * that bucket names are global, so using your accessKey is recommended to
+ * prevent conflicts with other AWS users.</dd>
+ * 
+ * <dt>objectPrefix</dt>
+ * <dd>Prefix used for all object names [optional, default is ""] Should
+ * include the workspace name ("<code>${wsp.name}</code>" or "<code>version</code>"
+ * for the versioning PM) to put multiple workspaces into one bucket.</dd>
+ * </dl>
+ * 
+ * <h4>Example XML Config:</h4>
+ * 
+ * <pre>
+ * &lt;PersistenceManager class=&quot;org.apache.jackrabbit.persistence.amazon.AmazonS3PersistenceManager&quot;&gt;
+ *     &lt;param name=&quot;accessKey&quot;    value=&quot;abcde01234&quot;/&gt;
+ *     &lt;param name=&quot;secretKey&quot;    value=&quot;topsecret&quot;/&gt;
+ *     &lt;param name=&quot;bucket&quot;       value=&quot;abcde01234.jcrstore&quot;/&gt;
+ *     &lt;param name=&quot;objectPrefix&quot; value=&quot;${wsp.name}/&quot;/&gt;
+ * &lt;/PersistenceManager&gt;
+ * </pre>
+ * 
+ * <h4>Technical implementation - Mapping JCR to S3:</h4>
+ * 
+ * <p>
+ * JCR (a BundlePersistenceManager) has NodePropBundles and NodeReferences,
+ * whereas S3 has Buckets and Objects. There is a maximum of 100 buckets
+ * per account, which is why everything, even multiple workspaces, should
+ * go into one bucket. To put multiple workspaces into one bucket, an
+ * objectPrefix can be applied to all objects.
+ * </p>
+ * 
+ * <b>Node and Property Bundle:</b>
+ * <ul>
+ * <li>
+ * Node => <b>Object</b> with node UUID as name
+ * "<code>&lt;objectPrefix&gt;/&lt;nodeUUID&gt;</code>"
+ * </li>
+ * <li>binary serialized NodePropBundle as data of the object</li>
+ * </ul>
+ * 
+ * <b>Node references:</b>
+ * <ul>
+ * <li>
+ * Node references => <b>Object</b> named
+ * "<code>&lt;objectPrefix&gt;/references/&lt;nodeUUID&gt;</code>"
+ * </li>
+ * <li>
+ * all reference UUIDs as metadata => "<code>refby[i]</code>"
+ * </li>
+ * </ul>
+ * 
+ * @author <a href="mailto:alexander dot klimetschek at googlemail dot com">
+ *         Alexander Klimetschek</a>
+ * 
+ */
+public class AmazonS3BundlePersistenceManager extends
+		AbstractBundlePersistenceManager {
+	
+	public static Log log = LogFactory.getLog(AmazonS3BundlePersistenceManager.class);
+
+	/**
+	 * Application part of the HTTP User-Agent sent to Amazon S3.
+	 */
+	public static String USER_AGENT_APPLICATION = "Jackrabbit/1.3 (Bundle PM)";
+	
+	/**
+	 * Name of the system property that can be used to configure the
+	 * AWS accessKey if it is not set via {@link #setAccessKey(String)}.
+	 */
+	public static final String AWS_ACCESS_KEY_PROP = "aws.accessKey";
+	
+	/**
+	 * Name of the system property that can be used to configure the
+	 * AWS secretKey if it is not set via {@link #setSecretKey(String)}.
+	 */
+	public static final String AWS_SECRET_KEY_PROP = "aws.secretKey";
+
+	/**
+	 * Name of the "subdirectory" or simply prefix that is prepended to the
+	 * objects that hold the list of references that point to a certain node.
+	 * The rest of the object name will be the target node UUID.
+	 */
+	public static final String REFERENCES_PREFIX = "references/";
+	
+	// NOTE: all metadata keys must be lower-case
+	// TODO: do not use metadata for refs
+	public static final String METADATA_REF = "refby";
+	
+	/**
+	 * Initial size for the byte arrays that contain the serialized bundle data
+	 * before uploading them to S3.
+	 */
+    public static final int INITIAL_BUNDLE_BUFFER_SIZE = 1024;
+
+	//--------------------------------------------------------< member fields >
+
+    // config
+	protected String accessKey;
+	protected String secretKey;
+	protected String bucketName;
+	protected String objectPrefix;
+	
+	// S3 objects
+	protected S3Service s3service;
+	protected S3Bucket bucket;
+
+	// other
+    protected BundleBinding binding;
+    /** name of the workspace/version this PM handles (for debug logging) */
+	protected String name;
+    
+	//--------------------------------------------------------< config setter >
+
+	/**
+	 * Sets the Amazon access key of the AWS account (aka the user id) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param accessKey the plain access key string
+	 */
+	public void setAccessKey(String accessKey) {
+		this.accessKey = accessKey;
+	}
+
+	public String getAccessKey() {
+		return accessKey;
+	}
+
+	/**
+	 * Sets the Amazon secret key of the AWS account (aka the password) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param secretKey the plain secret key string
+	 */
+	public void setSecretKey(String secretKey) {
+		this.secretKey = secretKey;
+	}
+
+	public String getSecretKey() {
+		return secretKey;
+	}
+
+	/**
+	 * The bucket in which all data from this persistence manager will be
+	 * stored.
+	 * 
+	 * <p>
+	 * <b>Please note that buckets are global and you are advised to use your own
+	 * AWS access key as part of the bucket name to avoid conflicts!</b> For
+	 * example, use <code>AWS-KEY.jcrstore</code>, where AWS-KEY is replaced
+	 * by your own AWS access key. Also note that the number of buckets is by
+	 * default limited to 100 per AWS account.
+	 * </p>
+	 * 
+	 * <p>
+	 * If not specified, the default value will be your access key.
+	 * </p>
+	 * 
+	 * @param bucket
+	 *            a bucket name inside which this PM will store its data
+	 */
+	public void setBucket(String bucket) {
+		this.bucketName = bucket;
+	}
+
+	public String getBucket() {
+		return bucketName;
+	}
+
+	/**
+	 * This objectPrefix is applied to all objects that get created inside
+	 * the bucket and allows multiple workspaces to be kept in a single
+	 * bucket. Simply use "<code>${wsp.name}</code>" in the repository.xml
+	 * as a placeholder for the workspace name, or use e.g.
+	 * "<code>version</code>" in the version persistence manager config.
+	 * 
+	 * <p>
+	 * It is recommended to end the prefix with a slash (e.g.
+	 * "<code>${wsp.name}/</code>") for better compatibility
+	 * with other tools, which will then see each workspace as a
+	 * separate directory.
+	 * </p>
+	 *  
+	 * @param objectPrefix
+	 */
+	public void setObjectPrefix(String objectPrefix) {
+		this.objectPrefix = objectPrefix;
+	}
+
+	public String getObjectPrefix() {
+		return objectPrefix;
+	}
+	
+	//--------------------------------------------------------< lifecycle >
+
+	public void init(PMContext context) throws Exception {
+		super.init(context);
+
+		if (accessKey == null) {
+			accessKey = System.getProperty(AWS_ACCESS_KEY_PROP);
+			if (accessKey == null) {
+				throw new IllegalArgumentException("No accessKey specified for " +
+						this.getClass().getSimpleName() + " and no '" + AWS_ACCESS_KEY_PROP + "' " +
+						"system property set.");
+			}
+		}
+		if (secretKey == null) {
+			secretKey = System.getProperty(AWS_SECRET_KEY_PROP);
+			if (secretKey == null) {
+				throw new IllegalArgumentException("No secretKey specified for " +
+						this.getClass().getSimpleName() + " and no '" + AWS_SECRET_KEY_PROP + "' " +
+						"system property set.");
+			}
+		}
+		// set the default value for baseBucket
+		if (bucketName == null) {
+			bucketName = accessKey;
+		}
+		// ensure empty string
+		if (objectPrefix == null) {
+			objectPrefix = "";
+		}
+		
+		AWSCredentials credentials = new AWSCredentials(accessKey, secretKey);
+		
+		try {
+			// SOAP has constraints, is not recommended and it does not work
+			//s3service = new SoapS3Service(credentials, USER_AGENT_APPLICATION);
+			s3service = new RestS3Service(credentials, USER_AGENT_APPLICATION, null);
+			if (!s3service.isBucketAccessible(bucketName)) {
+				bucket = s3service.createBucket(bucketName);
+			} else {
+				bucket = new S3Bucket(bucketName);
+			}
+		} catch (S3ServiceException e) {
+			throw new AmazonS3Exception(credentials, bucketName, e);
+		}
+		
+        name = context.getHomeDir().getName();
+        
+        binding = new BundleBinding(
+        	new ErrorHandling(),
+        	
+        	// dummy implementation, never used due to min blob size set to MAX long
+        	new BLOBStore() {
+
+				public String createId(PropertyId id, int index) {
+					return null;
+				}
+	
+				public InputStream get(String blobId) throws Exception {
+					return null;
+				}
+	
+				public void put(String blobId, InputStream in, long size)
+						throws Exception {
+				}
+	
+				public boolean remove(String blobId) throws Exception {
+					return false;
+				}
+        	
+        	},
+        	getNsIndex(),
+        	getNameIndex()
+        );
+        
+        // never store in blobStore
+        binding.setMinBlobSize(Long.MAX_VALUE);
+	}
+
+	public void close() throws Exception {
+	}
+	
+	//--------------------------------------------------------< s3 helper >
+	
+	protected String getObjectKey(NodeId id) {
+		return objectPrefix + id.toString();
+	}
+
+	protected String getObjectKey(NodeReferencesId id) {
+		return objectPrefix + REFERENCES_PREFIX + id.getTargetId().toString();
+	}
+	
+	protected synchronized boolean containsObject(String key) throws ItemStateException {
+		try {
+	        log.debug("Looking for " + key + " [" + name + "]");
+			S3Object[] objects = s3service.listObjects(bucket, key, null);
+	        log.debug("Found " + objects.length + " while looking for " + key + " [" + name + "]");
+	        
+	        if (objects.length == 0) {
+	        	return false;
+	        }
+	        
+			// listObjects returns a list with all objects that match the prefix
+			for (int i = 0; i < objects.length; i++) {
+				S3Object obj = objects[i];
+				// so we have to find an exact match
+				if (obj.getKey().equals(key)) {
+			        log.debug("> Found " + key + " [" + name + "]");
+					return true;
+				}
+			}
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot listObjects from S3 while " +
+					"checking for existence of '" + key + "' " +
+					"in bucket '" + bucket.getName() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+        log.debug("> Did not find " + key + " [" + name + "]");
+		return false;
+	}
+
+	//--------------------------------------------------------< exists >
+	
+	protected boolean existsBundle(NodeId id) throws ItemStateException {
+		return containsObject(getObjectKey(id));
+	}
+
+	public boolean exists(NodeReferencesId targetId) throws ItemStateException {
+		return containsObject(getObjectKey(targetId));
+	}
+
+	//--------------------------------------------------------< load >
+	
+	protected synchronized NodePropBundle loadBundle(NodeId id) throws ItemStateException {
+		String key = getObjectKey(id);
+		
+		S3Object object;
+		try {
+			object = s3service.getObject(bucket, key);
+		} catch (S3ServiceException e) {
+			// object does not exist
+	        log.debug("Not found " + key + " [" + name + "]");
+	        //Thread.dumpStack();
+			return null;
+		}
+
+        log.debug("Read " + key + " [" + name + "]");
+        
+		try {
+	        InputStream is = object.getDataInputStream();
+	        
+	        // buffering bundle data: https://issues.apache.org/jira/browse/JCR-1039
+//            byte[] bytes = new byte[(int) object.getContentLength()];
+//            int read, pos = 0;
+//            while ((read = is.read(bytes, pos, bytes.length - pos)) > 0) {
+//                pos += read;
+//            }
+//            DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes));
+//            NodePropBundle bundle = binding.readBundle(din, id);
+            
+	        // direct input stream access
+            NodePropBundle bundle = binding.readBundle(new DataInputStream(is), id);
+            
+            bundle.setSize(object.getContentLength());
+            
+            return bundle;
+        } catch (Exception e) {
+			throw new ItemStateException("Error while reading NodePropBundle from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key prefix '" + key + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+        }
+	}
+
+	public synchronized NodeReferences load(NodeReferencesId targetId)
+			throws NoSuchItemStateException, ItemStateException {
+		String key = getObjectKey(targetId);
+		
+		S3Object object;
+		try {
+	        log.debug("Reading " + key + " [" + name + "]");
+			object = s3service.getObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new NoSuchItemStateException("'" + targetId.toString() + "' " +
+					"Cannot find object representing NodeReferences in S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+		
+		try {
+            NodeReferences refs = new NodeReferences(targetId);
+
+			int i = 1;
+			while (true) {
+				String name = METADATA_REF + "[" + i + "]";
+				if (!object.containsMetadata(name)) {
+					break;
+				}
+                refs.addReference(PropertyId.valueOf((String) object.getMetadata(name)));
+                i++; // move on to the next refby[i] entry; without this the loop never terminates
+			}
+			return refs;
+		} catch (Exception e) {
+			throw new ItemStateException("Error while parsing NodeReferences values from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+	}
+	
+	//--------------------------------------------------------< store >
+	
+	protected synchronized void storeBundle(NodePropBundle bundle) throws ItemStateException {
+		String key = getObjectKey(bundle.getId());
+		S3Object object = new S3Object(key);
+		
+        try {
+            ByteArrayOutputStream out = new ByteArrayOutputStream(INITIAL_BUNDLE_BUFFER_SIZE);
+            DataOutputStream dout = new DataOutputStream(out);
+            binding.writeBundle(dout, bundle);
+            dout.close();
+            
+            object.setContentLength(out.size());
+            object.setContentType("application/octet-stream");
+			object.setDataInputStream(new ByteArrayInputStream(out.toByteArray()));
+			
+        } catch (Exception e) {
+			throw new ItemStateException("Cannot store NodePropBundle into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+        }
+		
+		try {
+	        log.debug("Storing " + object.getKey() + " [" + name + "]");
+			s3service.putObject(bucket, object);
+			
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot store NodePropBundle into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+	protected synchronized void store(NodeReferences refs) throws ItemStateException {
+		String key = getObjectKey(refs.getId());
+		S3Object object = new S3Object(key);
+		
+		// TODO: store refs in object value as plain text list as well
+        Collection c = refs.getReferences();
+        int i=1;
+        for (Iterator iter = c.iterator(); iter.hasNext();) {
+            PropertyId propId = (PropertyId) iter.next();
+    		object.addMetadata(METADATA_REF + "[" + i + "]", propId.toString());
+            i++;
+        }
+			
+		try {
+	        log.debug("Storing " + object.getKey() + " [" + name + "]");
+			s3service.putObject(bucket, object);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot write NodeReferences into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+	//--------------------------------------------------------< destroy >
+	
+	protected synchronized void destroyBundle(NodePropBundle bundle)
+			throws ItemStateException {
+		String key = getObjectKey(bundle.getId());
+		try {
+	        log.debug("Deleting " + key + " [" + name + "]");
+			s3service.deleteObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot deleteObject NodePropBundle from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+	
+	protected synchronized void destroy(NodeReferences refs) throws ItemStateException {
+		String key = getObjectKey(refs.getId());
+		try {
+	        log.debug("Deleting " + key + " [" + name + "]");
+			s3service.deleteObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot deleteObject NodeReferences from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3BundlePersistenceManager.java
------------------------------------------------------------------------------
    svn:eol-style = native
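
To make the object-key mapping above concrete: with objectPrefix set to "default/" and
a node UUID of cafebabe-cafe-babe-cafe-babecafebabe (an illustrative value), the
getObjectKey() helpers produce

    bundle object key:      default/cafebabe-cafe-babe-cafe-babecafebabe
    references object key:  default/references/cafebabe-cafe-babe-cafe-babecafebabe

and the ids of the referring properties are stored as the references object's
refby[1], refby[2], ... metadata entries. Binary property values are not sent to a
separate BLOB store; setMinBlobSize(Long.MAX_VALUE) forces them to be serialized
inside the bundle object itself.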

Added: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3PersistenceManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3PersistenceManager.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3PersistenceManager.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3PersistenceManager.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,814 @@
+package org.apache.jackrabbit.persistence.amazon;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import javax.jcr.PropertyType;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.NodeId;
+import org.apache.jackrabbit.core.PropertyId;
+import org.apache.jackrabbit.core.nodetype.NodeDefId;
+import org.apache.jackrabbit.core.nodetype.PropDefId;
+import org.apache.jackrabbit.core.persistence.AbstractPersistenceManager;
+import org.apache.jackrabbit.core.persistence.PMContext;
+import org.apache.jackrabbit.core.state.ItemStateException;
+import org.apache.jackrabbit.core.state.NoSuchItemStateException;
+import org.apache.jackrabbit.core.state.NodeReferences;
+import org.apache.jackrabbit.core.state.NodeReferencesId;
+import org.apache.jackrabbit.core.state.NodeState;
+import org.apache.jackrabbit.core.state.PropertyState;
+import org.apache.jackrabbit.core.value.BLOBFileValue;
+import org.apache.jackrabbit.core.value.InternalValue;
+import org.apache.jackrabbit.name.QName;
+import org.apache.jackrabbit.persistence.util.AmazonS3Exception;
+import org.apache.jackrabbit.uuid.UUID;
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.impl.rest.httpclient.RestS3Service;
+import org.jets3t.service.model.S3Bucket;
+import org.jets3t.service.model.S3Object;
+import org.jets3t.service.security.AWSCredentials;
+
+/**
+ * <b>NOTE: this class is not fully tested. Please use
+ * AmazonS3BundlePersistenceManager instead.</b>
+ * 
+ * <p>
+ * <code>AmazonS3PersistenceManager</code> is a Jackrabbit persistence manager
+ * that stores the data in <a href="http://www.amazon.com/s3">Amazon S3</a>.
+ * All you need is an S3 account (which costs money, depending on size of the
+ * stored data and the used upload/download bandwidth).
+ * 
+ * <p>
+ * Note that the performance might be disappointing, as Jackrabbit makes
+ * many fine-grained calls to the persistence manager and each one results in
+ * an HTTP request to S3, which can be slow.
+ * </p>
+ * 
+ * <h4>Configuration parameters:</h4>
+ * 
+ * <p>
+ * To configure this persistence manager, you will have to provide your Amazon
+ * AWS credentials, namely access key and secret key, the bucket name to choose
+ * as well as the prefix for the object names, typically containing workspace
+ * names.
+ * </p>
+ * 
+ * <dl>
+ * <dt>accessKey</dt>
+ * <dd>Amazon AWS access key (aka account user id) [required]</dd>
+ * 
+ * <dt>secretKey</dt>
+ * <dd>Amazon AWS secret key (aka account password) [required]</dd>
+ * 
+ * <dt>bucket</dt>
+ * <dd>Name of the S3 bucket to use [optional, default uses accessKey] Note
+ * that bucket names are global, so using your accessKey is recommended to
+ * prevent conflicts with other AWS users.</dd>
+ * 
+ * <dt>objectPrefix</dt>
+ * <dd>Prefix used for all object names [optional, default is ""] Should
+ * include the workspace name ("<code>${wsp.name}</code>" or "<code>version</code>"
+ * for the versioning PM) to put multiple workspaces into one bucket.</dd>
+ * </dl>
+ * 
+ * <h4>Example XML Config:</h4>
+ * 
+ * <pre>
+ * &lt;PersistenceManager class=&quot;org.apache.jackrabbit.persistence.amazon.AmazonS3PersistenceManager&quot;&gt;
+ *     &lt;param name=&quot;accessKey&quot;    value=&quot;abcde01234&quot;/&gt;
+ *     &lt;param name=&quot;secretKey&quot;    value=&quot;topsecret&quot;/&gt;
+ *     &lt;param name=&quot;bucket&quot;       value=&quot;abcde01234.jcrstore&quot;/&gt;
+ *     &lt;param name=&quot;objectPrefix&quot; value=&quot;${wsp.name}/&quot;/&gt;
+ * &lt;/PersistenceManager&gt;
+ * </pre>
+ * 
+ * <h4>Technical implementation - Mapping JCR to S3:</h4>
+ * 
+ * <p>
+ * JCR has Nodes and Properties, whereas S3 has Buckets and Objects. There is a
+ * maximum of 100 buckets per account, which is why everything, even multiple
+ * workspaces, should go into one bucket. To put multiple workspaces into one
+ * bucket, an objectPrefix can be applied to all objects.
+ * </p>
+ * 
+ * <b>Nodes:</b>
+ * <ul>
+ * <li>Node => <b>Object</b> with UUID as name "<code>&lt;objectPrefix&gt;/&lt;nodeUUID&gt;</code>"
+ * </li>
+ * <li> Node metadata (type, mixins, etc.) => Metadata of that Object
+ * <ul>
+ * <li>parent node uuid => "<code>parentUUID</code>"</li>
+ * <li>node definition id => "<code>nodeDefinitionID</code>"</li>
+ * <li>mixins => "<code>mixin[1]</code>", "<code>mixin[2]</code>"
+ * (indexes start at 1, like same-name sibling nodes)</li>
+ * <li>modification count => "<code>modCount</code>"</li>
+ * <li>child nodes => "<code>&lt;childname&gt;</code>" (with uuid as value)</li>
+ * <li>same name siblings child nodes => "<code>&lt;childname&gt;[1]</code>", "<code>&lt;childname&gt;[2]</code>"</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * 
+ * <b>Properties:</b>
+ * <ul>
+ * <li>Property => <b>Object</b> named "<code>&lt;objectPrefix&gt;/&lt;parentNodeUUID&gt;/&lt;propertyname&gt;</code>"
+ * </li>
+ * <li> Multi value Property => Objects named "<code>&lt;objectPrefix&gt;/&lt;parentNodeUUID&gt;/&lt;propertyname&gt;[i]</code>"
+ * </li>
+ * <li> values: binary directly, other types converted to String and then stored
+ * as UTF-8 encoded </li>
+ * <li> Property metadata => Metadata of that Object
+ * <ul>
+ * <li>property type => "<code>type</code>"</li>
+ * <li>property definition id => "<code>propertyDefinitionID</code>"</li>
+ * <li>modification count => "<code>modCount</code>"</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * 
+ * <b>Node references:</b>
+ * <ul>
+ * <li>Node references => <b>Object</b> named "<code>&lt;objectPrefix&gt;/references/&lt;nodeUUID&gt;</code>"
+ * with all the reference UUIDs as metadata => "<code>refBy[1]</code>", "<code>refBy[2]</code>"
+ * </li>
+ * </ul>
+ * 
+ * @author <a href="mailto:alexander dot klimetschek at googlemail dot com">
+ *         Alexander Klimetschek</a>
+ * 
+ */
+public class AmazonS3PersistenceManager extends AbstractPersistenceManager {
+	
+	public static Log log = LogFactory.getLog(AmazonS3PersistenceManager.class);
+
+	public static String USER_AGENT_APPLICATION = "Jackrabbit/1.3";
+	
+	public static final String AWS_ACCESS_KEY_PROP = "aws.accessKey";
+	public static final String AWS_SECRET_KEY_PROP = "aws.secretKey";
+
+	public static final String REFERENCES_PREFIX = "references/";
+	
+    protected static final String ENCODING = "UTF-8";
+    
+	// NOTE: all metadata keys must be lower-case
+	public static final String METADATA_PARENT_UUID            = "parentuuid";
+	public static final String METADATA_NODE_DEFINITION_ID     = "nodedefinitionid";
+	public static final String METADATA_MIXIN                  = "mixin";
+	public static final String METADATA_MOD_COUNT              = "modcount";
+	public static final String METADATA_TYPE                   = "type";
+	public static final String METADATA_PROPERTY_DEFINITION_ID = "propdefinitionid";
+	public static final String METADATA_REF                    = "refby";
+//	public static final String METADATA_CHILD_NODE_PREFIX      = "/";
+//	public static final String METADATA_CHILD_PROP_PREFIX      = "*";
+
+	protected String accessKey;
+	protected String secretKey;
+	protected String bucketName;
+	protected String objectPrefix;
+	
+	protected S3Service s3service;
+	protected S3Bucket bucket;
+
+	/**
+	 * Sets the Amazon access key of the AWS account (aka the user id) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param accessKey the plain access key string
+	 */
+	public void setAccessKey(String accessKey) {
+		this.accessKey = accessKey;
+	}
+
+	public String getAccessKey() {
+		return accessKey;
+	}
+
+	/**
+	 * Sets the Amazon secret key of the AWS account (aka the password) used
+	 * in the credentials for the API connection.
+	 * 
+	 * @param secretKey the plain secret key string
+	 */
+	public void setSecretKey(String secretKey) {
+		this.secretKey = secretKey;
+	}
+
+	public String getSecretKey() {
+		return secretKey;
+	}
+
+	/**
+	 * The bucket in which all data from this persistence manager will be
+	 * stored.
+	 * 
+	 * <p>
+	 * <b>Please note that buckets are global and you are advised to use your own
+	 * AWS access key as part of the bucket name to avoid conflicts!</b> For
+	 * example, use <code>AWS-KEY.jcrstore</code>, where AWS-KEY is replaced
+	 * by your own AWS access key. Also note that the number of buckets is by
+	 * default limited to 100 per AWS account.
+	 * </p>
+	 * 
+	 * <p>
+	 * If not specified, the default value will be your access key.
+	 * </p>
+	 * 
+	 * @param bucket
+	 *            a bucket name inside which this PM will store its data
+	 */
+	public void setBucket(String bucket) {
+		this.bucketName = bucket;
+	}
+
+	public String getBucket() {
+		return bucketName;
+	}
+
+	/**
+	 * This objectPrefix is applied to all objects that get created inside
+	 * the bucket and allows multiple workspaces to be kept in a single
+	 * bucket. Simply use "<code>${wsp.name}</code>" in the repository.xml
+	 * as a placeholder for the workspace name, or use e.g.
+	 * "<code>version</code>" in the version persistence manager config.
+	 * 
+	 * <p>
+	 * It is recommended to end the prefix with a slash (e.g.
+	 * "<code>${wsp.name}/</code>") for better compatibility
+	 * with other tools, which will then see each workspace as a
+	 * separate directory.
+	 * </p>
+	 *  
+	 * @param objectPrefix
+	 */
+	public void setObjectPrefix(String objectPrefix) {
+		this.objectPrefix = objectPrefix;
+	}
+
+	public String getObjectPrefix() {
+		return objectPrefix;
+	}
+	
+	//--------------------------------------------------------< lifecycle >
+
+	public void init(PMContext context) throws Exception {
+		if (accessKey == null) {
+			accessKey = System.getProperty(AWS_ACCESS_KEY_PROP);
+			if (accessKey == null) {
+				throw new IllegalArgumentException("No accessKey specified for " +
+						this.getClass().getSimpleName() + " and no '" + AWS_ACCESS_KEY_PROP + "' " +
+						"system property set.");
+			}
+		}
+		if (secretKey == null) {
+			secretKey = System.getProperty(AWS_SECRET_KEY_PROP);
+			if (secretKey == null) {
+				throw new IllegalArgumentException("No secretKey specified for " +
+						this.getClass().getSimpleName() + " and no '" + AWS_SECRET_KEY_PROP + "' " +
+						"system property set.");
+			}
+		}
+		// set the default value for baseBucket
+		if (bucketName == null) {
+			bucketName = accessKey;
+		}
+		// ensure empty string
+		if (objectPrefix == null) {
+			objectPrefix = "";
+		}
+		
+		AWSCredentials credentials = new AWSCredentials(accessKey, secretKey);
+		
+		try {
+			// SOAP has constraints, is not recommended and it does not work
+			//s3service = new SoapS3Service(credentials, USER_AGENT_APPLICATION);
+			s3service = new RestS3Service(credentials, USER_AGENT_APPLICATION, null);
+			if (!s3service.isBucketAccessible(bucketName)) {
+				bucket = s3service.createBucket(bucketName);
+			} else {
+				bucket = new S3Bucket(bucketName);
+			}
+		} catch (S3ServiceException e) {
+			throw new AmazonS3Exception(credentials, bucketName, e);
+		}
+	}
+
+	public void close() throws Exception {
+	}
+	
+	//--------------------------------------------------------< s3 helper >
+
+	protected String getObjectKey(NodeId id) {
+		return objectPrefix + id.toString();
+	}
+
+	protected String getObjectKey(PropertyId id) {
+		return objectPrefix + id.toString();
+	}
+
+	protected String getObjectKey(NodeReferencesId id) {
+		return objectPrefix + REFERENCES_PREFIX + id.getTargetId().toString();
+	}
+
+	protected boolean containsObject(String key) throws ItemStateException {
+		try {
+			String keyEnc = URLEncoder.encode(key, ENCODING);
+			S3Object[] objects = s3service.listObjects(bucket, keyEnc, null);
+	        log.debug("Found " + objects.length + " while looking for " + key);
+	        
+	        if (objects.length == 0) {
+	        	return false;
+	        }
+	        
+			// listObjects returns a list with all objects that match the prefix
+			for (int i = 0; i < objects.length; i++) {
+				S3Object obj = objects[i];
+				// so we have to find an exact match
+				if (obj.getKey().equals(key)) {
+			        log.debug("> Found " + key);
+					return true;
+				}
+			}
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot listObjects from S3 while " +
+					"checking for existence of '" + key + "' " +
+					"in bucket '" + bucket.getName() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		} catch (UnsupportedEncodingException e) {
+			throw new ItemStateException("Cannot listObjects from S3 while " +
+					"checking for existence of '" + key + "' " +
+					"in bucket '" + bucket.getName() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+        log.debug("> Did not find " + key);
+		return false;
+	}
+
+	//--------------------------------------------------------< exists >
+
+	public synchronized boolean exists(NodeId id) throws ItemStateException {
+		return containsObject(getObjectKey(id));
+	}
+
+	public synchronized boolean exists(PropertyId id) throws ItemStateException {
+		return containsObject(getObjectKey(id));
+	}
+
+	public synchronized boolean exists(NodeReferencesId targetId) throws ItemStateException {
+		return containsObject(getObjectKey(targetId));
+	}
+
+	//--------------------------------------------------------< load >
+
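+	// Format written by store(NodeState): the object metadata carries the parent
+	// UUID, the node definition id, indexed mixin type entries and the mod count;
+	// the object data is UTF-8 text listing the child node entries
+	// ("name[index]=uuid", one per line), an empty delimiter line and then the
+	// property names.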
+	public synchronized NodeState load(NodeId id) throws NoSuchItemStateException,
+			ItemStateException {
+		String key = getObjectKey(id);
+		
+		S3Object object;
+		try {
+			object = s3service.getObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new NoSuchItemStateException("'" + id.toString() + "' " +
+					"Cannot find object representing Node in S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+		
+		try {
+			NodeState state = createNew(id);
+			String parentUUID = (String) object.getMetadata(METADATA_PARENT_UUID);
+			// parentUUID might be null (for root nodes)
+			if (parentUUID != null && !parentUUID.equals("")) {
+				state.setParentId(new NodeId(new UUID(parentUUID)));
+			}
+			state.setDefinitionId(NodeDefId.valueOf((String) object.getMetadata(METADATA_NODE_DEFINITION_ID)));
+	
+			List list = new ArrayList();
+			int i = 1;
+			while (true) {
+				String name = METADATA_MIXIN + "[" + i + "]";
+				if (!object.containsMetadata(name)) {
+					break;
+				}
+				list.add(object.getMetadata(name));
+				i++;
+			}
+			if (list.size() > 0) {
+				state.setMixinTypeNames(new HashSet(list));
+			}
+	
+	        state.setModCount(Short.parseShort((String) object.getMetadata(METADATA_MOD_COUNT)));
+	        
+	        // child nodes and properties
+	        InputStream is = object.getDataInputStream();
+	        BufferedReader reader = new BufferedReader(new InputStreamReader(is, ENCODING));
+	        String line;
+	        boolean passedChildNodes = false;
+	        while ((line = reader.readLine()) != null ) {
+	        	if (line.length() == 0) {
+	        		passedChildNodes = true;
+	        		continue;
+	        	}
+	        	if (passedChildNodes) {
+	        		// properties: "name"
+	        		state.addPropertyName(QName.valueOf(line));
+	        	} else {
+	        		// child node: "name[index]=uuid"
+	        		int eq = line.lastIndexOf('=');
+	        		String name = line.substring(0, eq);
+	        		String uuid = line.substring(eq + 1);
+	        		// strip the same-name sibling index appended by store(NodeState)
+	        		int bracket = name.lastIndexOf('[');
+	        		if (bracket != -1) {
+	        			name = name.substring(0, bracket);
+	        		}
+	        		state.addChildNodeEntry(QName.valueOf(name), new NodeId(new UUID(uuid)));
+	        	}
+	        }
+	        reader.close();
+	        
+//	        Map metadata = object.getMetadataMap();
+//	        
+//	        // make sure keys are sorted for correct order of child nodes (same-name siblings)
+//	        list = new ArrayList(metadata.keySet());
+//	        Collections.sort(list);
+//	        
+//	        // find properties (objects named 'nodekey/name')
+//	        for (Iterator iter = list.iterator(); iter.hasNext(); ) {
+//        		// decode URL (was name of HTTP header)
+//	        	String mkey = URLDecoder.decode((String) iter.next(), ENCODING);
+//	        	// filter out
+//	        	if (mkey.startsWith(METADATA_CHILD_PROP_PREFIX)) {
+//	        		//String uuid = (String) metadata.get(mkey);
+//	        		// remove "/" at the beginning
+//	        		String propName = mkey.substring(METADATA_CHILD_PROP_PREFIX.length());
+//	        		
+//	        		log.debug("Found child property " + propName + " for Node " + id.toString());
+//	        		
+//		            state.addPropertyName(QName.valueOf(propName));
+//	        	}
+//	        }
+//	        
+//	        // find child nodes (list of name/uuid pairs in metadata, starting with "/")
+//	        for (Iterator iter = list.iterator(); iter.hasNext(); ) {
+//        		// decode URL (was name of HTTP header)
+//	        	String mkey = (String) iter.next();
+//        		Object valueObj = metadata.get(mkey);
+//        		mkey = URLDecoder.decode(mkey, ENCODING);
+//	        	// filter out
+//	        	if (mkey.startsWith(METADATA_CHILD_NODE_PREFIX)) {
+//	        		// remove "/" at the beginning
+//	        		String nodeName = mkey.substring(METADATA_CHILD_NODE_PREFIX.length());
+////	        		// parse index at the end
+////	        		String indexStr = nodeName.substring(nodeName.indexOf('[')+1, nodeName.indexOf(']'));
+////	        		int index = Integer.parseInt(indexStr);
+//	        		nodeName = nodeName.substring(0, nodeName.indexOf('['));
+//	        		
+//	        		log.debug("Found child node " + nodeName + " for Node " + id.toString());
+//	        		
+//	        		String uuid = (String) valueObj;
+//		            state.addChildNodeEntry(QName.valueOf(nodeName), new NodeId(new UUID(uuid)));
+//	        	}
+//	        }
+	        
+			return state;
+			
+		} catch (Exception e) {
+			throw new ItemStateException("Error while parsing Node metadata values from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+	}
+
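+	// Format written by store(PropertyState): every value is stored as its own
+	// S3 object ("<key>[i]" for multi-valued properties, the plain key for a
+	// single value); type, property definition id and mod count are repeated in
+	// the metadata of each value object.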
+	public synchronized PropertyState load(PropertyId id) throws NoSuchItemStateException,
+			ItemStateException {
+		String key = getObjectKey(id);
+		
+		PropertyState state = createNew(id);
+		
+		S3Object[] objects;
+		try {
+			objects = s3service.listObjects(bucket, key, null);
+		} catch (S3ServiceException e) {
+			throw new NoSuchItemStateException("'" + id.toString() + "' " +
+					"Cannot find object(s) representing Property in S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+		
+        // multi-valued properties are stored as one "<key>[i]" object per value,
+        // single-valued properties under the plain key (see store(PropertyState))
+        state.setMultiValued(objects.length > 0 && !objects[0].getKey().equals(key));
+        InternalValue[] values = new InternalValue[objects.length];
+        
+		try {
+			for (int i = 0; i < objects.length; i++) {
+				S3Object object = objects[i];
+				if (i == 0) {
+					state.setType(Integer.parseInt((String) object.getMetadata(METADATA_TYPE)));
+					state.setDefinitionId(PropDefId.valueOf((String) object.getMetadata(METADATA_PROPERTY_DEFINITION_ID)));
+			        state.setModCount(Short.parseShort((String) object.getMetadata(METADATA_MOD_COUNT)));
+				}
+				// load property value
+	            InternalValue val;
+	            if (state.getType() == PropertyType.BINARY) {
+                    InputStream is = object.getDataInputStream();
+                    try {
+                        val = InternalValue.create(is, false);
+                    } finally {
+                        try {
+                            is.close();
+                        } catch (IOException e) {
+                            // ignore
+                        }
+                    }
+	            } else {
+	            	// read the full content via DataInputStream
+	            	// -> byte[] -> String
+	                byte[] bytes = new byte[(int) object.getContentLength()];
+	                DataInputStream is = new DataInputStream(object.getDataInputStream());
+	                try {
+	                    is.readFully(bytes);
+	                } finally {
+	                    is.close();
+	                }
+	                String s = new String(bytes, ENCODING);
+	                val = InternalValue.valueOf(s, state.getType());
+	            }
+	            values[i] = val;				
+			}
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Error while reading Property value(s) from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key prefix '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		} catch (Exception e) {
+			throw new ItemStateException("Error while parsing Property value(s) from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key prefix '" + key + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+        state.setValues(values);
+        
+		return state;
+	}
+
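+	// Format written by store(NodeReferences): a single S3 object whose metadata
+	// holds the referencing property ids as indexed entries starting at index 1.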
+	public synchronized NodeReferences load(NodeReferencesId targetId)
+			throws NoSuchItemStateException, ItemStateException {
+		String key = getObjectKey(targetId);
+		
+		S3Object object;
+		try {
+			object = s3service.getObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new NoSuchItemStateException("'" + targetId.toString() + "' " +
+					"Cannot find object representing NodeReferences in S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+		
+		try {
+            NodeReferences refs = new NodeReferences(targetId);
+
+			int i = 1;
+			while (true) {
+				String name = METADATA_REF + "[" + i + "]";
+				if (!object.containsMetadata(name)) {
+					break;
+				}
+                refs.addReference(PropertyId.valueOf((String) object.getMetadata(name)));
+                i++;
+			}
+			return refs;
+		} catch (Exception e) {
+			throw new ItemStateException("Error while parsing NodeReferences values from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+	}
+
+	//--------------------------------------------------------< store >
+
+	protected synchronized void store(NodeState state) throws ItemStateException {
+		String key = getObjectKey(state.getNodeId());
+		S3Object object = new S3Object(key);
+		
+		if (state.getParentId() == null) {
+			object.removeMetadata(METADATA_PARENT_UUID);
+			//object.addMetadata(METADATA_PARENT_UUID, "");
+		} else {
+			object.addMetadata(METADATA_PARENT_UUID, state.getParentId().toString());
+		}
+		object.addMetadata(METADATA_NODE_DEFINITION_ID, state.getDefinitionId().toString());
+		int i=1;
+        for (Iterator iter = state.getMixinTypeNames().iterator(); iter.hasNext();) {
+    		object.addMetadata(METADATA_MIXIN + "[" + i + "]", iter.next().toString());
+    		i++;
+        }
+		object.addMetadata(METADATA_MOD_COUNT, Short.toString(state.getModCount()));
+		
+		// store properties and child nodes in text file that is stored as this object's value
+		StringBuffer propsAndNodes = new StringBuffer();
+		
+		// child nodes - one per line: "name[index]=uuid"
+        for (Iterator iter = state.getChildNodeEntries().iterator(); iter.hasNext();) {
+            NodeState.ChildNodeEntry entry = (NodeState.ChildNodeEntry) iter.next();
+            propsAndNodes.append(entry.getName() + "[" + entry.getIndex() + "]=" + entry.getId().toString() + "\n");
+        }
+        
+        // empty line as delimiter
+        propsAndNodes.append("\n");
+        
+		// properties - one on each line: "qname"
+        for (Iterator iter = state.getPropertyNames().iterator(); iter.hasNext();) {
+            QName propName = (QName) iter.next();
+			propsAndNodes.append(propName.toString() + "\n");
+        }
+        
+		try {
+			// store the serialized child node / property list as this object's data
+			byte[] data = propsAndNodes.toString().getBytes(ENCODING);
+			object.setContentType("text/plain");
+			object.setContentLength(data.length);
+			object.setDataInputStream(new ByteArrayInputStream(data));
+		} catch (UnsupportedEncodingException e) {
+			throw new ItemStateException("Cannot putObject Node into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+		}
+		
+//		// child property names
+//        for (Iterator iter = state.getPropertyNames().iterator(); iter.hasNext();) {
+//            QName propName = (QName) iter.next();
+//            
+//    		try {
+//    			// add "/" for distinguishing child node metadata
+//    			// URL encode metadata key because it becomes an HTTP header name
+//    			String mKey = URLEncoder.encode(METADATA_CHILD_PROP_PREFIX + propName.toString(), ENCODING);
+//    			// store dummy as value
+//				object.addMetadata(mKey, "property");
+//			} catch (UnsupportedEncodingException e) {
+//				throw new ItemStateException("Cannot putObject Node into S3, " +
+//						"bucket '" + bucket.getName() + "', " +
+//						"key '" + object.getKey() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+//			}
+//        }
+//		
+//		// child nodes
+//        for (Iterator iter = state.getChildNodeEntries().iterator(); iter.hasNext();) {
+//            NodeState.ChildNodeEntry entry = (NodeState.ChildNodeEntry) iter.next();
+//            
+//    		try {
+//    			// add "/" for distinguishing child node metadata
+//    			// URL encode metadata key because it becomes an HTTP header name
+//    			String mKey = URLEncoder.encode(METADATA_CHILD_NODE_PREFIX + entry.getName() + "[" + entry.getIndex() + "]", ENCODING);
+//    			// store uuid as value
+//				object.addMetadata(mKey, entry.getId().toString());
+//			} catch (UnsupportedEncodingException e) {
+//				throw new ItemStateException("Cannot putObject Node into S3, " +
+//						"bucket '" + bucket.getName() + "', " +
+//						"key '" + object.getKey() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+//			}
+//        }
+			
+		try {
+	        log.debug("Storing " + object.getKey());
+	        for (Iterator iter = state.getChildNodeEntries().iterator(); iter.hasNext();) {
+	            NodeState.ChildNodeEntry entry = (NodeState.ChildNodeEntry) iter.next();
+	        	log.debug("> Child node " + entry.getName() + " => " + entry.getId().toString());
+	        }
+	        for (Iterator iter = state.getPropertyNames().iterator(); iter.hasNext();) {
+	            QName propName = (QName) iter.next();
+	        	log.debug("> Property " + propName.toString());
+	        }
+	        
+			s3service.putObject(bucket, object);
+			
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot putObject Node into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+	protected synchronized void store(PropertyState state) throws ItemStateException {
+		String keyPrefix = getObjectKey(state.getPropertyId());
+		
+		String type = Integer.toString(state.getType());
+		String propDefID = state.getDefinitionId().toString();
+		String modCount = Short.toString(state.getModCount());
+
+		// one S3 object for each property value
+        InternalValue[] values = state.getValues();
+        for (int i = 0; i < values.length; i++) {
+        	String key = keyPrefix;
+        	if (state.isMultiValued()) {
+        		// one object per value of a multi-valued property
+        		key = keyPrefix + "[" + i + "]";
+        	}
+        	
+			S3Object object = new S3Object(key);
+			// all objects contain the same property metadata
+			object.addMetadata(METADATA_TYPE, type);
+			object.addMetadata(METADATA_PROPERTY_DEFINITION_ID, propDefID);
+			object.addMetadata(METADATA_MOD_COUNT, modCount);
+			
+            try {
+	            InternalValue val = values[i];
+	            if (state.getType() == PropertyType.BINARY) {
+	                BLOBFileValue blobVal = (BLOBFileValue) val.internalValue();
+	                object.setContentType("binary/octet-stream");
+	                object.setContentLength(blobVal.getLength());
+					object.setDataInputStream(blobVal.getStream());
+	                // NOTE: a more specific content type could be set here if it is known
+	                blobVal.discard();
+	            } else {
+	            	// serialize the value as a UTF-8 string
+	            	byte[] bytes = val.toString().getBytes(ENCODING);
+	                object.setContentType("text/plain");
+	            	object.setContentLength(bytes.length);
+	            	object.setDataInputStream(new ByteArrayInputStream(bytes));
+	            }
+			} catch (Exception e) {
+				throw new ItemStateException("Cannot read Property value for " +
+						"property '" + object.getKey() + "'. (" + e.getClass().getName() + " was: " + e.getMessage() + ")", e);
+			}
+		
+    		try {
+    	        log.debug("Storing " + object.getKey());
+				s3service.putObject(bucket, object);
+			} catch (S3ServiceException e) {
+				throw new ItemStateException("Cannot write Property into S3, " +
+						"bucket '" + bucket.getName() + "', " +
+						"key '" + object.getKey() + "'. " +
+						AmazonS3Exception.formatS3ErrorMessage(e), e);
+			}
+        }
+	}
+
+	protected synchronized void store(NodeReferences refs) throws ItemStateException {
+		String key = getObjectKey(refs.getId());
+		S3Object object = new S3Object(key);
+		
+		// TODO: store refs in object value as plain text list as well
+        Collection c = refs.getReferences();
+        int i=1;
+        for (Iterator iter = c.iterator(); iter.hasNext();) {
+            PropertyId propId = (PropertyId) iter.next();
+    		object.addMetadata(METADATA_REF + "[" + i + "]", propId.toString());
+            i++;
+        }
+			
+		try {
+	        log.debug("Storing " + object.getKey());
+			s3service.putObject(bucket, object);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot write NodeReferences into S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + object.getKey() + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+	
+	//--------------------------------------------------------< destroy >
+
+	protected synchronized void destroy(NodeState state) throws ItemStateException {
+		String key = getObjectKey(state.getNodeId());
+		try {
+	        log.debug("Deleting " + key);
+			s3service.deleteObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot deleteObject Node from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+	protected synchronized void destroy(PropertyState state) throws ItemStateException {
+		String key = getObjectKey(state.getPropertyId());
+		try {
+	        log.debug("Deleting " + key);
+	        // multi-valued properties are stored as one "<key>[i]" object per value,
+	        // so delete every object that belongs to this property
+			S3Object[] objects = s3service.listObjects(bucket, key, null);
+			for (int i = 0; i < objects.length; i++) {
+				String objKey = objects[i].getKey();
+				if (objKey.equals(key) || objKey.startsWith(key + "[")) {
+					s3service.deleteObject(bucket, objKey);
+				}
+			}
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot deleteObject Property from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+
+	protected synchronized void destroy(NodeReferences refs) throws ItemStateException {
+		String key = getObjectKey(refs.getId());
+		try {
+	        log.debug("Deleting " + key);
+			s3service.deleteObject(bucket, key);
+		} catch (S3ServiceException e) {
+			throw new ItemStateException("Cannot deleteObject NodeReferences from S3, " +
+					"bucket '" + bucket.getName() + "', " +
+					"key '" + key + "'. " +
+					AmazonS3Exception.formatS3ErrorMessage(e), e);
+		}
+	}
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/amazon/AmazonS3PersistenceManager.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/util/AmazonS3Exception.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/util/AmazonS3Exception.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/util/AmazonS3Exception.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/util/AmazonS3Exception.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,48 @@
+package org.apache.jackrabbit.persistence.util;
+
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.security.AWSCredentials;
+
+/**
+ * Exception for AmazonS3 persistence / file system implementations.
+ * Helps to format the underlying {@link S3ServiceException} and to provide
+ * helpful error details.
+ * 
+ * @author <a href="mailto:alexander dot klimetschek at googlemail dot com">
+ *         Alexander Klimetschek</a>
+ *
+ */
+public class AmazonS3Exception extends Exception {
+
+	private static final long serialVersionUID = -8573644884544233300L;
+	
+	/**
+	 * Helper method that formats all the interesting details from
+	 * the {@link S3ServiceException} into a single string that can be
+	 * appended to error messages.
+	 */
+	public static String formatS3ErrorMessage(S3ServiceException e) {
+		return "(S3ServiceException was '" + e.getMessage() + "', " +
+							 "errorCode '" + e.getS3ErrorCode() +"', " +
+							 "message '"   + e.getS3ErrorMessage() +"', " +
+							 "hostID '"    + e.getS3ErrorHostId() +"', " +
+							 "requestID '" + e.getS3ErrorRequestId() +"')";
+	}
+	
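+	/**
+	 * Helper method that formats a bucket-level error: it reports the S3
+	 * endpoint host, the requested bucket name and the access key that was
+	 * used, followed by the generic {@link S3ServiceException} details.
+	 */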
+	public static String formatBucketErrorMessage(AWSCredentials credentials, String bucketName, S3ServiceException e) {
+		return "'" + e.getS3ErrorMessage() + "' when accessing bucket '" + bucketName + "' " +
+			"at '" + S3Service.getS3EndpointHost() + "' with accessKey='" + credentials.getAccessKey() + "'. " +
+			formatS3ErrorMessage(e);
+	}
+	
+	/**
+	 * For errors when creating or accessing a bucket (i.e. the first "login").
+	 * @param bucketName name of the bucket that was requested
+	 * @param cause underlying exception from jets3t library
+	 */
+	public AmazonS3Exception(AWSCredentials credentials, String bucketName, S3ServiceException cause) {
+		super(formatBucketErrorMessage(credentials, bucketName, cause), cause);
+	}
+
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/main/java/org/apache/jackrabbit/persistence/util/AmazonS3Exception.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/init/TestAll.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/init/TestAll.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/init/TestAll.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/init/TestAll.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.test.amazon.init;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+
+/**
+ * Collects all test classes setting up test data in the workspace. This test
+ * data is specific to Jackrabbit and will probably not work in other
+ * implementations.
+ */
+public class TestAll extends TestCase {
+
+    /**
+     * Returns a <code>Test</code> suite that executes all tests inside this
+     * package.
+     *
+     * @return a <code>Test</code> suite that executes all tests inside this
+     *         package.
+     */
+    public static Test suite() {
+    	org.apache.jackrabbit.test.amazon.tck.TestAll.loadAWSCredentialProperties();
+    	
+    	return org.apache.jackrabbit.init.TestAll.suite();
+    }
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/init/TestAll.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/tck/TestAll.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/tck/TestAll.java?rev=691635&view=auto
==============================================================================
--- jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/tck/TestAll.java (added)
+++ jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/tck/TestAll.java Wed Sep  3 08:29:07 2008
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.test.amazon.tck;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+import org.apache.jackrabbit.test.JCRTestSuite;
+
+/**
+ * Test suite that includes all test suites from jackrabbit-jcr-tests.
+ * Test suite that runs a selected subset of the test suites from
+ * jackrabbit-jcr-tests.
+public class TestAll extends TestCase {
+	
+	public static void printError() {
+		System.err.println();
+		System.err.println(">>>>> ERROR: AmazonS3 Test Suite:");
+		System.err.println(">>>>> File 'aws.properties' not found in the working directory or incomplete.");
+		System.err.println(">>>>> It must contain aws.accessKey=<key> and aws.secretKey=<topsecret> properties.");
+		System.err.println(">>>>> These keys require an Amazon AWS account. See http://www.amazon.com/s3");
+		System.err.println();
+		System.exit(-1);
+	}
+	
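+	/**
+	 * Loads the AWS credentials from an 'aws.properties' file in the current
+	 * working directory into the system properties. If the file is missing or
+	 * does not define both 'aws.accessKey' and 'aws.secretKey', the test run
+	 * is aborted via {@link #printError()}.
+	 */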
+	public static void loadAWSCredentialProperties() {
+        File awsCredentialsPropFile = new File("aws.properties");
+        if (awsCredentialsPropFile.exists()) {
+        	try {
+				System.getProperties().load(new FileInputStream(awsCredentialsPropFile));
+			} catch (IOException e) {
+				printError();
+			}
+            // check if set
+            if (System.getProperty("aws.accessKey") == null ||
+            		System.getProperty("aws.secretKey") == null) {
+            	printError();
+            }
+        } else {
+        	printError();
+        }
+	}
+
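+	/**
+	 * Bundles the selected suites from jackrabbit-jcr-tests listed in the
+	 * constructor into a single "Selected JCR API tests" suite.
+	 */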
+	public static class JCRSubsetTestSuite extends TestSuite {
+
+	    public JCRSubsetTestSuite() {
+	        super("Selected JCR API tests");
+	        addTest(org.apache.jackrabbit.test.api.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.query.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.nodetype.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.util.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.lock.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.version.TestAll.suite());
+	        addTest(org.apache.jackrabbit.test.api.observation.TestAll.suite());
+	    }
+
+	}
+	
+    public static Test suite() {
+    	loadAWSCredentialProperties();
+    	
+        return new JCRSubsetTestSuite();
+    }
+
+}

Propchange: jackrabbit/sandbox/jackrabbit-amazon/src/test/java/org/apache/jackrabbit/test/amazon/tck/TestAll.java
------------------------------------------------------------------------------
    svn:eol-style = native