Posted to oak-commits@jackrabbit.apache.org by ad...@apache.org on 2020/05/14 11:11:13 UTC
svn commit: r1877731 [1/4] - in /jackrabbit/oak/trunk: ./
oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/ oak-it/
oak-it/src/test/java/org/apache/jackrabbit/oak/
oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/ oak-jcr/
oak-parent...
Author: adulceanu
Date: Thu May 14 11:11:12 2020
New Revision: 1877731
URL: http://svn.apache.org/viewvc?rev=1877731&view=rev
Log:
OAK-8827 - AWS support for segment-tar
Contribution by Alvaro Dias
Added:
jackrabbit/oak/trunk/oak-segment-aws/
jackrabbit/oak/trunk/oak-segment-aws/pom.xml
jackrabbit/oak/trunk/oak-segment-aws/src/
jackrabbit/oak/trunk/oak-segment-aws/src/main/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsAppendableFile.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManager.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsBlobMetadata.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsContext.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFile.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFile.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFile.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsPersistence.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsRepositoryLock.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsSegmentArchiveEntry.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsSegmentArchiveReader.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsSegmentArchiveWriter.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsSegmentStoreService.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/Configuration.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/queue/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/queue/SegmentWriteAction.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/queue/SegmentWriteQueue.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsCompact.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentCopy.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsSegmentStoreMigrator.java
jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/tool/AwsToolUtils.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManagerTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFileTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFileConcurrencyIT.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFileTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFileTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsReadSegmentTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsRepositoryLockTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsTarFileTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsTarFilesTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/AwsTarWriterTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/S3MockRule.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/fixture/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/fixture/SegmentAwsFixture.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/journal/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/journal/AwsJournalReaderTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/journal/AwsTarRevisionsTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/queue/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/queue/SegmentWriteQueueTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyAzureToTarTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTarToAzureTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/aws/tool/SegmentCopyTestBase.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceBlobTest.java
jackrabbit/oak/trunk/oak-segment-aws/src/test/java/org/apache/jackrabbit/oak/segment/spi/persistence/split/SplitPersistenceTest.java
Modified:
jackrabbit/oak/trunk/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/FixturesHelper.java
jackrabbit/oak/trunk/oak-it/pom.xml
jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/NodeStoreFixtures.java
jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/NodeStoreTest.java
jackrabbit/oak/trunk/oak-jcr/pom.xml
jackrabbit/oak/trunk/oak-parent/pom.xml
jackrabbit/oak/trunk/oak-run-commons/pom.xml
jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java
jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java
jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/SegmentTarFixture.java
jackrabbit/oak/trunk/oak-run/pom.xml
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/CompactCommand.java
jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/SegmentCopyCommand.java
jackrabbit/oak/trunk/pom.xml
Modified: jackrabbit/oak/trunk/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/FixturesHelper.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/FixturesHelper.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/FixturesHelper.java (original)
+++ jackrabbit/oak/trunk/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/FixturesHelper.java Thu May 14 11:11:12 2020
@@ -43,7 +43,7 @@ public final class FixturesHelper {
* default fixtures when no {@code nsfixtures} is provided
*/
public enum Fixture {
- DOCUMENT_NS, @Deprecated SEGMENT_MK, DOCUMENT_RDB, MEMORY_NS, DOCUMENT_MEM, SEGMENT_TAR, SEGMENT_AZURE, COMPOSITE_SEGMENT, COMPOSITE_MEM, COW_DOCUMENT
+ DOCUMENT_NS, @Deprecated SEGMENT_MK, DOCUMENT_RDB, MEMORY_NS, DOCUMENT_MEM, SEGMENT_TAR, SEGMENT_AWS, SEGMENT_AZURE, COMPOSITE_SEGMENT, COMPOSITE_MEM, COW_DOCUMENT
}
private static final Set<Fixture> FIXTURES;
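For context, a minimal sketch of how a test can gate on the new enum value (this assumes the existing FixturesHelper.getFixtures() accessor; the class itself is illustrative and not part of this commit):

    import java.util.Set;

    import org.apache.jackrabbit.oak.commons.FixturesHelper;
    import org.apache.jackrabbit.oak.commons.FixturesHelper.Fixture;

    public class AwsFixtureCheck {

        public static boolean awsFixtureActive() {
            // getFixtures() resolves the nsfixtures system property,
            // falling back to the defaults declared in FixturesHelper.
            Set<Fixture> active = FixturesHelper.getFixtures();
            return active.contains(Fixture.SEGMENT_AWS);
        }
    }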
Modified: jackrabbit/oak/trunk/oak-it/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-it/pom.xml?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-it/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-it/pom.xml Thu May 14 11:11:12 2020
@@ -64,6 +64,12 @@
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-aws</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
<artifactId>oak-segment-azure</artifactId>
<version>${project.version}</version>
<scope>test</scope>
@@ -120,6 +126,13 @@
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-aws</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
Modified: jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/NodeStoreFixtures.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/NodeStoreFixtures.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/NodeStoreFixtures.java (original)
+++ jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/NodeStoreFixtures.java Thu May 14 11:11:12 2020
@@ -31,6 +31,7 @@ import org.apache.jackrabbit.oak.fixture
import org.apache.jackrabbit.oak.fixture.NodeStoreFixture;
import org.apache.jackrabbit.oak.composite.CompositeMemoryStoreFixture;
import org.apache.jackrabbit.oak.composite.CompositeSegmentStoreFixture;
+import org.apache.jackrabbit.oak.segment.aws.fixture.SegmentAwsFixture;
import org.apache.jackrabbit.oak.segment.azure.fixture.SegmentAzureFixture;
import org.apache.jackrabbit.oak.segment.fixture.SegmentTarFixture;
@@ -40,6 +41,8 @@ public class NodeStoreFixtures {
public static final NodeStoreFixture SEGMENT_TAR = new SegmentTarFixture();
+ public static final NodeStoreFixture SEGMENT_AWS = new SegmentAwsFixture();
+
public static final NodeStoreFixture SEGMENT_AZURE = new SegmentAzureFixture();
public static final NodeStoreFixture DOCUMENT_NS = new DocumentMongoFixture();
@@ -71,6 +74,9 @@ public class NodeStoreFixtures {
if (fixtures.contains(FixturesHelper.Fixture.SEGMENT_TAR)) {
configuredFixtures.add(SEGMENT_TAR);
}
+ if (fixtures.contains(FixturesHelper.Fixture.SEGMENT_AWS)) {
+ configuredFixtures.add(SEGMENT_AWS);
+ }
if (fixtures.contains(FixturesHelper.Fixture.SEGMENT_AZURE)) {
configuredFixtures.add(SEGMENT_AZURE);
}
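A minimal usage sketch for the new fixture constant, assuming the createNodeStore/dispose lifecycle that the other NodeStoreFixture implementations here follow:

    import org.apache.jackrabbit.oak.NodeStoreFixtures;
    import org.apache.jackrabbit.oak.fixture.NodeStoreFixture;
    import org.apache.jackrabbit.oak.spi.state.NodeStore;

    public class SegmentAwsSmoke {

        public static void smoke() throws Exception {
            NodeStoreFixture fixture = NodeStoreFixtures.SEGMENT_AWS;
            NodeStore store = fixture.createNodeStore();
            try {
                // exercise the AWS-backed store like any other NodeStore
            } finally {
                fixture.dispose(store);
            }
        }
    }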
Modified: jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/NodeStoreTest.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/NodeStoreTest.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/NodeStoreTest.java (original)
+++ jackrabbit/oak/trunk/oak-it/src/test/java/org/apache/jackrabbit/oak/spi/state/NodeStoreTest.java Thu May 14 11:11:12 2020
@@ -457,7 +457,8 @@ public class NodeStoreTest extends OakBa
NodeBuilder x = test.getChildNode("x");
if (fixture == NodeStoreFixtures.SEGMENT_TAR || fixture == NodeStoreFixtures.MEMORY_NS
|| fixture == NodeStoreFixtures.COMPOSITE_MEM || fixture == NodeStoreFixtures.COMPOSITE_SEGMENT
- || fixture == NodeStoreFixtures.COW_DOCUMENT || fixture == NodeStoreFixtures.SEGMENT_AZURE) {
+ || fixture == NodeStoreFixtures.COW_DOCUMENT || fixture == NodeStoreFixtures.SEGMENT_AWS
+ || fixture == NodeStoreFixtures.SEGMENT_AZURE) {
assertTrue(x.moveTo(x, "xx"));
assertFalse(x.exists());
assertFalse(test.hasChildNode("x"));
Modified: jackrabbit/oak/trunk/oak-jcr/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-jcr/pom.xml?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-jcr/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-jcr/pom.xml Thu May 14 11:11:12 2020
@@ -274,6 +274,12 @@
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-aws</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
<artifactId>oak-segment-azure</artifactId>
<version>${project.version}</version>
<scope>test</scope>
@@ -309,6 +315,13 @@
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-aws</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
Modified: jackrabbit/oak/trunk/oak-parent/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-parent/pom.xml?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-parent/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-parent/pom.xml Thu May 14 11:11:12 2020
@@ -70,7 +70,7 @@
<!-- specifies on which fixture to run the integration testing tests.
override in profiles or provide from command line to change behaviour. Provide
more fixtures space separated. See org.apache.jackrabbit.oak.jcr.FixturesHelper#AVAILABLE_FIXTURES
- for the possible values: SEGMENT_MK SEGMENT_TAR SEGMENT_AZURE DOCUMENT_NS DOCUMENT_RDB -->
+ for the possible values: SEGMENT_MK SEGMENT_TAR SEGMENT_AWS SEGMENT_AZURE DOCUMENT_NS DOCUMENT_RDB -->
<fixtures>SEGMENT_TAR</fixtures>
<!-- whether skip the surefire unit testing during the integration testing.
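As the comment above says, the fixture list can also be supplied from the command line. Assuming Maven's usual property override and the existing integrationTesting profile, an invocation along the lines of mvn clean verify -PintegrationTesting -Dfixtures="SEGMENT_TAR SEGMENT_AWS" would exercise the new AWS fixture alongside the default.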
Modified: jackrabbit/oak/trunk/oak-run-commons/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run-commons/pom.xml?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run-commons/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-run-commons/pom.xml Thu May 14 11:11:12 2020
@@ -63,6 +63,11 @@
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-aws</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
<artifactId>oak-segment-azure</artifactId>
<version>${project.version}</version>
</dependency>
Modified: jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java (original)
+++ jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java Thu May 14 11:11:12 2020
@@ -71,6 +71,7 @@ public abstract class OakFixture {
public static final String OAK_RDB_DS = "Oak-RDB-DS";
public static final String OAK_SEGMENT_TAR = "Oak-Segment-Tar";
+ public static final String OAK_SEGMENT_AWS = "Oak-Segment-Aws";
public static final String OAK_SEGMENT_AZURE = "Oak-Segment-Azure";
public static final String OAK_SEGMENT_TAR_DS = "Oak-Segment-Tar-DS";
public static final String OAK_SEGMENT_TAR_COLD = "Oak-Segment-Tar-Cold";
@@ -362,6 +363,15 @@ public abstract class OakFixture {
dsCacheInMB, true, syncInterval, shareBlobStore, secure, oneShotRun);
}
+ public static OakFixture getSegmentTarWithAwsSegmentStore(final File base, final String awsBucketName,
+ final String awsRootPath, final String awsJournalTableName, final String awsLockTableName,
+ final int maxFileSizeMB, final int cacheSizeMB, final boolean useBlobStore, final int dsCacheInMB) {
+ return SegmentTarFixtureBuilder.segmentTarFixtureBuilder(OakFixture.OAK_SEGMENT_AWS, base)
+ .withAws(awsBucketName, awsRootPath, awsJournalTableName, awsLockTableName)
+ .withMaxFileSize(maxFileSizeMB).withSegmentCacheSize(cacheSizeMB).withBlobStore(useBlobStore)
+ .withDSCacheSize(dsCacheInMB).build();
+ }
+
public static OakFixture getSegmentTarWithAzureSegmentStore(final File base, final String azureConnectionString, final String azureContainerName, final String azureRootPath,
final int maxFileSizeMB, final int cacheSizeMB, final boolean useBlobStore, final int dsCacheInMB) {
return SegmentTarFixtureBuilder
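An illustrative call to the new factory method; all argument values below are hypothetical placeholders:

    import java.io.File;

    import org.apache.jackrabbit.oak.fixture.OakFixture;

    public class AwsOakFixtureExample {

        public static OakFixture create() {
            return OakFixture.getSegmentTarWithAwsSegmentStore(
                    new File("target/segmentstore"), // base
                    "my-oak-bucket",                 // awsBucketName
                    "oak",                           // awsRootPath
                    "oakjournaltable",               // awsJournalTableName
                    "oaklocktable",                  // awsLockTableName
                    256,   // maxFileSizeMB
                    256,   // cacheSizeMB
                    false, // useBlobStore
                    16);   // dsCacheInMB
        }
    }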
Modified: jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java (original)
+++ jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java Thu May 14 11:11:12 2020
@@ -113,6 +113,13 @@ public class OakRepositoryFixture implem
memoryMapping, useBlobStore, dsCacheInMB, syncInterval, shareBlobStore, secure, oneShotRun));
}
+ public static RepositoryFixture getSegmentTarWithAwsSegmentStore(final File base, final String awsBucketName,
+ final String awsRootPath, final String awsJournalTableName, final String awsLockTableName,
+ final int maxFileSizeMB, final int cacheSizeMB, final boolean useBlobStore, final int dsCacheInMB) {
+ return new OakRepositoryFixture(OakFixture.getSegmentTarWithAwsSegmentStore(base, awsBucketName, awsRootPath,
+ awsJournalTableName, awsLockTableName, maxFileSizeMB, cacheSizeMB, useBlobStore, dsCacheInMB));
+ }
+
public static RepositoryFixture getSegmentTarWithAzureSegmentStore(final File base, final String azureConnectionString,
final String azureContainerName, final String azureRootPath,
final int maxFileSizeMB, final int cacheSizeMB, final boolean useBlobStore,
Modified: jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/SegmentTarFixture.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/SegmentTarFixture.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/SegmentTarFixture.java (original)
+++ jackrabbit/oak/trunk/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/fixture/SegmentTarFixture.java Thu May 14 11:11:12 2020
@@ -42,6 +42,8 @@ import org.apache.jackrabbit.oak.segment
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundException;
import org.apache.jackrabbit.oak.segment.SegmentNotFoundExceptionListener;
+import org.apache.jackrabbit.oak.segment.aws.AwsContext;
+import org.apache.jackrabbit.oak.segment.aws.AwsPersistence;
import org.apache.jackrabbit.oak.segment.azure.AzurePersistence;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.file.FileStore;
@@ -79,6 +81,12 @@ public class SegmentTarFixture extends O
private boolean memoryMapping;
private boolean useBlobStore;
private int dsCacheSize;
+
+ private String awsBucketName;
+ private String awsRootPath;
+ private String awsJournalTableName;
+ private String awsLockTableName;
+
private String azureConnectionString;
private String azureContainerName;
private String azureRootPath;
@@ -111,12 +119,20 @@ public class SegmentTarFixture extends O
this.useBlobStore = useBlobStore;
return this;
}
-
+
public SegmentTarFixtureBuilder withDSCacheSize(int dsCacheSize) {
this.dsCacheSize = dsCacheSize;
return this;
}
+ public SegmentTarFixtureBuilder withAws(String awsBucketName, String awsRootPath, String awsJournalTableName, String awsLockTableName) {
+ this.awsBucketName = awsBucketName;
+ this.awsRootPath = awsRootPath;
+ this.awsJournalTableName = awsJournalTableName;
+ this.awsLockTableName = awsLockTableName;
+ return this;
+ }
+
public SegmentTarFixtureBuilder withAzure(String azureConnectionString, String azureContainerName, String azureRootPath) {
this.azureConnectionString = azureConnectionString;
this.azureContainerName = azureContainerName;
@@ -142,6 +158,11 @@ public class SegmentTarFixture extends O
private final boolean oneShotRun;
private final boolean secure;
+ private final String awsBucketName;
+ private final String awsRootPath;
+ private final String awsJournalTableName;
+ private final String awsLockTableName;
+
private final String azureConnectionString;
private final String azureContainerName;
private final String azureRootPath;
@@ -176,6 +197,12 @@ public class SegmentTarFixture extends O
this.memoryMapping = builder.memoryMapping;
this.useBlobStore = builder.useBlobStore;
this.dsCacheSize = builder.dsCacheSize;
+
+ this.awsBucketName = builder.awsBucketName;
+ this.awsRootPath = builder.awsRootPath;
+ this.awsJournalTableName = builder.awsJournalTableName;
+ this.awsLockTableName = builder.awsLockTableName;
+
this.azureConnectionString = builder.azureConnectionString;
this.azureContainerName = builder.azureContainerName;
this.azureRootPath = builder.azureRootPath;
@@ -194,6 +221,11 @@ public class SegmentTarFixture extends O
.withSegmentCacheSize(segmentCacheSize)
.withMemoryMapping(memoryMapping);
+ if (awsBucketName != null) {
+ AwsContext awsContext = AwsContext.create(awsBucketName, awsRootPath, awsJournalTableName, awsLockTableName);
+ fileStoreBuilder.withCustomPersistence(new AwsPersistence(awsContext));
+ }
+
if (azureConnectionString != null) {
CloudStorageAccount cloud = CloudStorageAccount.parse(azureConnectionString);
CloudBlobContainer container = cloud.createCloudBlobClient().getContainerReference(azureContainerName);
@@ -237,6 +269,11 @@ public class SegmentTarFixture extends O
FileStoreBuilder builder = fileStoreBuilder(new File(parentPath, "primary-" + i));
+ if (awsBucketName != null) {
+ AwsContext awsContext = AwsContext.create(awsBucketName, awsRootPath, awsJournalTableName, awsLockTableName);
+ builder.withCustomPersistence(new AwsPersistence(awsContext, "primary-" + i));
+ }
+
if (azureConnectionString != null) {
CloudStorageAccount cloud = CloudStorageAccount.parse(azureConnectionString);
CloudBlobContainer container = cloud.createCloudBlobClient().getContainerReference(azureContainerName);
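The same wiring applies to a standalone FileStore; a sketch using the classes introduced by this commit (bucket and table names are placeholders, and FileStoreBuilder is assumed from oak-segment-tar):

    import java.io.File;

    import org.apache.jackrabbit.oak.segment.aws.AwsContext;
    import org.apache.jackrabbit.oak.segment.aws.AwsPersistence;
    import org.apache.jackrabbit.oak.segment.file.FileStore;

    import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;

    public class AwsFileStoreExample {

        public static FileStore open() throws Exception {
            AwsContext awsContext = AwsContext.create(
                    "my-oak-bucket", "oak", "oakjournaltable", "oaklocktable");
            // The builder still takes a local base directory even though
            // persistence is delegated to S3/DynamoDB.
            return fileStoreBuilder(new File("target/segmentstore"))
                    .withCustomPersistence(new AwsPersistence(awsContext))
                    .build();
        }
    }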
Modified: jackrabbit/oak/trunk/oak-run/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run/pom.xml?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run/pom.xml (original)
+++ jackrabbit/oak/trunk/oak-run/pom.xml Thu May 14 11:11:12 2020
@@ -35,6 +35,7 @@
<groovy.version>2.4.17</groovy.version>
<!--
Size History:
+ + 54 MB AWS support for segment-tar (OAK-8827)
+ 52 MB AWS java sdk update (OAK-8875)
+ 51 MB AWS java sdk update (OAK-7536)
+ 49 MB MongoDB Java driver 3.6.3 is bigger (OAK-7359)
@@ -44,7 +45,7 @@
+ 41 MB build failing on the release profile (OAK-6250)
+ 38 MB. Initial value. Current 35MB plus a 10%
-->
- <max.jar.size>52000000</max.jar.size>
+ <max.jar.size>54000000</max.jar.size>
</properties>
<build>
@@ -167,6 +168,7 @@
<phase>package</phase>
<configuration>
<rules>
+
<requireFilesSize>
<maxsize>${max.jar.size}</maxsize>
<files>
Modified: jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/CompactCommand.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/CompactCommand.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/CompactCommand.java (original)
+++ jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/CompactCommand.java Thu May 14 11:11:12 2020
@@ -27,6 +27,7 @@ import org.apache.jackrabbit.oak.run.com
import org.apache.jackrabbit.oak.segment.azure.tool.AzureCompact;
import org.apache.jackrabbit.oak.segment.azure.tool.AzureCompact.Builder;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.CompactorType;
+import org.apache.jackrabbit.oak.segment.aws.tool.AwsCompact;
import org.apache.jackrabbit.oak.segment.tool.Compact;
class CompactCommand implements Command {
@@ -86,7 +87,14 @@ class CompactCommand implements Command
code = azureBuilder
.build()
.run();
-
+ } else if (path.startsWith("aws:")) {
+ code = AwsCompact.builder()
+ .withPath(path)
+ .withForce(isTrue(forceArg.value(options)))
+ .withSegmentCacheSize(Integer.getInteger("cache", 256))
+ .withGCLogInterval(Long.getLong("compaction-progress-log", 150000))
+ .build()
+ .run();
} else {
org.apache.jackrabbit.oak.segment.tool.Compact.Builder tarBuilder = Compact.builder()
.withPath(new File(path))
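For reference, the new branch is equivalent to the following direct use of AwsCompact (the path argument is a placeholder; the aws: URI syntax is parsed by the AwsToolUtils class added in this commit):

    import org.apache.jackrabbit.oak.segment.aws.tool.AwsCompact;

    public class AwsCompactExample {

        public static int compact(String awsPath) throws Exception {
            // awsPath must start with "aws:"; see AwsToolUtils for the format
            return AwsCompact.builder()
                    .withPath(awsPath)
                    .withForce(false)
                    .withSegmentCacheSize(256)
                    .withGCLogInterval(150000L)
                    .build()
                    .run();
        }
    }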
Modified: jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/SegmentCopyCommand.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/SegmentCopyCommand.java?rev=1877731&r1=1877730&r2=1877731&view=diff
==============================================================================
--- jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/SegmentCopyCommand.java (original)
+++ jackrabbit/oak/trunk/oak-run/src/main/java/org/apache/jackrabbit/oak/run/SegmentCopyCommand.java Thu May 14 11:11:12 2020
@@ -19,6 +19,7 @@ package org.apache.jackrabbit.oak.run;
import joptsimple.OptionSpec;
import org.apache.jackrabbit.oak.run.commons.Command;
+import org.apache.jackrabbit.oak.segment.aws.tool.AwsSegmentCopy;
import org.apache.jackrabbit.oak.segment.azure.tool.SegmentCopy;
import java.io.IOException;
@@ -49,17 +50,29 @@ class SegmentCopyCommand implements Comm
String source = options.nonOptionArguments().get(0).toString();
String destination = options.nonOptionArguments().get(1).toString();
- SegmentCopy.Builder builder = SegmentCopy.builder()
- .withSource(source)
- .withDestination(destination)
- .withOutWriter(out)
- .withErrWriter(err);
+ if (AwsSegmentCopy.canExecute(source, destination)) {
+ int statusCode = AwsSegmentCopy.builder()
+ .withSource(source)
+ .withDestination(destination)
+ .withOutWriter(out)
+ .withErrWriter(err)
+ .build()
+ .run();
+
+ System.exit(statusCode);
+ } else {
+ SegmentCopy.Builder builder = SegmentCopy.builder()
+ .withSource(source)
+ .withDestination(destination)
+ .withOutWriter(out)
+ .withErrWriter(err);
+
+ if (options.has(last)) {
+ builder.withRevisionsCount(last.value(options) != null ? last.value(options) : 1);
+ }
- if (options.has(last)) {
- builder.withRevisionsCount(last.value(options) != null ? last.value(options) : 1);
+ System.exit(builder.build().run());
}
-
- System.exit(builder.build().run());
}
private void printUsage(OptionParser parser, PrintWriter err, String... messages) throws IOException {
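Likewise, the AWS copy path can be driven programmatically; a sketch mirroring the branch above (out and err are assumed to be PrintWriter instances, as in the command):

    import java.io.PrintWriter;

    import org.apache.jackrabbit.oak.segment.aws.tool.AwsSegmentCopy;

    public class AwsSegmentCopyExample {

        public static int copy(String source, String destination) throws Exception {
            PrintWriter out = new PrintWriter(System.out);
            PrintWriter err = new PrintWriter(System.err);
            // canExecute checks whether either endpoint uses the aws: scheme
            if (!AwsSegmentCopy.canExecute(source, destination)) {
                throw new IllegalArgumentException("Not an aws: copy");
            }
            return AwsSegmentCopy.builder()
                    .withSource(source)
                    .withDestination(destination)
                    .withOutWriter(out)
                    .withErrWriter(err)
                    .build()
                    .run();
        }
    }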
Added: jackrabbit/oak/trunk/oak-segment-aws/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/pom.xml?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/pom.xml (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/pom.xml Thu May 14 11:11:12 2020
@@ -0,0 +1,333 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-parent</artifactId>
+ <version>1.24.0</version>
+ <relativePath>../oak-parent/pom.xml</relativePath>
+ </parent>
+
+ <artifactId>oak-segment-aws</artifactId>
+ <packaging>bundle</packaging>
+
+ <name>Oak Segment AWS</name>
+
+ <properties>
+ <aws.version>1.11.475</aws.version>
+ <sqlite4java.version>1.0.392</sqlite4java.version>
+ </properties>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <configuration>
+ <instructions>
+ <Import-Package>
+ <!-- OAK-7182 -->${guava.osgi.import},
+ org.apache.jackrabbit.oak.segment.spi*,
+ !org.apache.jackrabbit.oak.segment*,
+ *
+ </Import-Package>
+ <Export-Package>
+ com.amazonaws.metrics.internal.cloudwatch,
+ com.amazonaws.services.securitytoken.internal,
+ com.amazonaws*,
+ com.fasterxml.jackson.dataformat.cbor,
+ com.sun.org.apache.xpath.internal,
+ kotlin,
+ org.bouncycastle.jce.provider,
+ software.amazon.ion*
+ </Export-Package>
+ <Embed-Dependency>
+ aws-java-sdk-dynamodb,
+ aws-java-sdk-s3,
+ dynamodb-lock-client
+ </Embed-Dependency>
+ </instructions>
+ </configuration>
+ <executions>
+ <execution>
+ <id>baseline</id>
+ <goals>
+ <goal>baseline</goal>
+ </goals>
+ <phase>pre-integration-test</phase>
+ <configuration>
+ <!--
+ This is required because there is no prior (stable) version of oak-segment-aws.
+ It should be removed after the first release.
+ -->
+ <skip>true</skip>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <configuration>
+ <excludes>
+ </excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-failsafe-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>failsafe-integration-tests</id>
+ <phase>integration-test</phase>
+ <goals>
+ <goal>integration-test</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <argLine>-Dsqlite4java.library.path=${project.build.directory}/dependencies</argLine>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>3.0.0-M4</version>
+ <configuration>
+ <argLine>-Dsqlite4java.library.path=${project.build.directory}/dependencies</argLine>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>3.1.1</version>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>test-compile</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <includeScope>test</includeScope>
+ <includeTypes>so,dll,dylib</includeTypes>
+ <outputDirectory>${project.build.directory}/dependencies</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <!-- ====================================================================== -->
+ <!-- D E P E N D E N C I E S -->
+ <!-- ====================================================================== -->
+ <dependencies>
+ <!-- Optional OSGi dependencies, used only when running within OSGi -->
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.core</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.compendium</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.annotation</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.service.component.annotations</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.service.metatype.annotations</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
+ <!-- Dependencies to other Oak components -->
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-tar</artifactId>
+ <version>${project.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-store-spi</artifactId>
+ <version>${project.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.9</version>
+ </dependency>
+
+ <!-- AWS dependencies -->
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>aws-java-sdk-s3</artifactId>
+ <version>${aws.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>aws-java-sdk-dynamodb</artifactId>
+ <version>${aws.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>dynamodb-lock-client</artifactId>
+ <version>1.1.0</version>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-blob-plugins</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-segment-tar</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-store-spi</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.jackrabbit</groupId>
+ <artifactId>oak-blob</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jul-to-slf4j</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.arakelian</groupId>
+ <artifactId>docker-junit-rule</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.findify</groupId>
+ <artifactId>s3mock_2.12</artifactId>
+ <version>0.2.5</version>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- Test dependencies for DynamoDBLocal -->
+ <dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>DynamoDBLocal</artifactId>
+ <version>1.11.86</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>sqlite4java</artifactId>
+ <version>${sqlite4java.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>sqlite4java-win32-x86</artifactId>
+ <version>${sqlite4java.version}</version>
+ <type>dll</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>sqlite4java-win32-x64</artifactId>
+ <version>${sqlite4java.version}</version>
+ <type>dll</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>libsqlite4java-osx</artifactId>
+ <version>${sqlite4java.version}</version>
+ <type>dylib</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>libsqlite4java-linux-i386</artifactId>
+ <version>${sqlite4java.version}</version>
+ <type>so</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.almworks.sqlite4java</groupId>
+ <artifactId>libsqlite4java-linux-amd64</artifactId>
+ <version>${sqlite4java.version}</version>
+ <type>so</type>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <repositories>
+ <!--
+ DynamoDBLocal is used for testing. The repository below is provided by Amazon for that purpose.
+ More details here: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.Maven.html
+ -->
+ <repository>
+ <id>dynamodblocal</id>
+ <name>AWS DynamoDB Local Release Repository</name>
+ <url>https://s3-us-west-2.amazonaws.com/dynamodb-local/release</url>
+ </repository>
+ </repositories>
+</project>
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsAppendableFile.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsAppendableFile.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsAppendableFile.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsAppendableFile.java Thu May 14 11:11:12 2020
@@ -0,0 +1,132 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.jackrabbit.oak.segment.aws;
+
+import static org.apache.jackrabbit.oak.segment.aws.AwsContext.TABLE_ATTR_CONTENT;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
+import com.amazonaws.services.dynamodbv2.document.Item;
+
+import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileReader;
+import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class AwsAppendableFile {
+
+ private static final Logger log = LoggerFactory.getLogger(AwsAppendableFile.class);
+
+ private final AwsContext awsContext;
+ private final String fileName;
+
+ /**
+ * Initializes a file that is backed by AWS DynamoDB documents. Every line
+ * of the file that is read or written maps to the
+ * "{@link AwsContext#TABLE_ATTR_CONTENT}" attribute of one document.
+ *
+ * @param awsContext The AWS context.
+ * @param fileName The name of the file.
+ */
+ public AwsAppendableFile(AwsContext awsContext, String fileName) {
+ this.awsContext = awsContext;
+ this.fileName = fileName;
+ }
+
+ public String getName() {
+ return fileName;
+ }
+
+ public JournalFileWriter openJournalWriter() {
+ return new AwsFileWriter(awsContext, fileName);
+ }
+
+ public JournalFileReader openJournalReader() {
+ return new AwsFileReader(awsContext, fileName);
+ }
+
+ public boolean exists() {
+ try {
+ return openJournalReader().readLine() != null;
+ } catch (IOException e) {
+ log.error("Can't check if the file exists", e);
+ return false;
+ }
+ }
+
+ public List<String> readLines() throws IOException {
+ return awsContext.getDocumentContents(fileName);
+ }
+
+ private static class AwsFileWriter implements JournalFileWriter {
+ private final AwsContext awsContext;
+ private final String fileName;
+
+ public AwsFileWriter(AwsContext awsContext, String fileName) {
+ this.awsContext = awsContext;
+ this.fileName = fileName;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing
+ }
+
+ @Override
+ public void truncate() throws IOException {
+ awsContext.deleteAllDocuments(fileName);
+ }
+
+ @Override
+ public void writeLine(String line) throws IOException {
+ awsContext.putDocument(fileName, line);
+ }
+ }
+
+ private static class AwsFileReader implements JournalFileReader {
+
+ private final AwsContext awsContext;
+ private final String fileName;
+
+ private Iterator<Item> iterator;
+
+ public AwsFileReader(AwsContext awsContext, String fileName) {
+ this.awsContext = awsContext;
+ this.fileName = fileName;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing
+ }
+
+ @Override
+ public String readLine() throws IOException {
+ if (iterator == null) {
+ iterator = awsContext.getDocumentsStream(fileName).iterator();
+ }
+
+ if (iterator.hasNext()) {
+ return iterator.next().getString(TABLE_ATTR_CONTENT);
+ }
+
+ return null;
+ }
+ }
+}
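A minimal usage sketch for the class above, assuming it sits next to an already-initialized AwsContext in the same package (the journal line is a placeholder):

    import java.io.IOException;

    import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileReader;
    import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileWriter;

    public class JournalRoundTrip {

        public static void roundTrip(AwsContext awsContext) throws IOException {
            AwsAppendableFile journal = new AwsAppendableFile(awsContext, "journal.log");

            JournalFileWriter writer = journal.openJournalWriter();
            writer.writeLine("<record-id> root"); // placeholder journal entry
            writer.close();

            JournalFileReader reader = journal.openJournalReader();
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                // each line is the "content" attribute of one DynamoDB document
            }
            reader.close();
        }
    }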
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManager.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManager.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsArchiveManager.java Thu May 14 11:11:12 2020
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+
+import org.apache.jackrabbit.oak.segment.spi.monitor.FileStoreMonitor;
+import org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitor;
+import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveManager;
+import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveReader;
+import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class AwsArchiveManager implements SegmentArchiveManager {
+
+ private static final Logger log = LoggerFactory.getLogger(AwsArchiveManager.class);
+
+ private static final String SEGMENT_FILE_NAME_PATTERN = "^([0-9a-f]{4})\\.([0-9a-f-]+)$";
+
+ private final AwsContext awsContext;
+
+ private final IOMonitor ioMonitor;
+
+ private final FileStoreMonitor monitor;
+
+ public AwsArchiveManager(AwsContext awsContext, IOMonitor ioMonitor, FileStoreMonitor fileStoreMonitor) {
+ this.awsContext = awsContext;
+ this.ioMonitor = ioMonitor;
+ this.monitor = fileStoreMonitor;
+ }
+
+ @Override
+ public List<String> listArchives() throws IOException {
+ List<String> archiveNames = awsContext.listPrefixes().stream().filter(i -> i.endsWith(".tar/")).map(Paths::get)
+ .map(Path::getFileName).map(Path::toString).collect(Collectors.toList());
+
+ Iterator<String> it = archiveNames.iterator();
+ while (it.hasNext()) {
+ String archiveName = it.next();
+ if (isArchiveEmpty(archiveName)) {
+ delete(archiveName);
+ it.remove();
+ }
+ }
+ return archiveNames;
+ }
+
+ /**
+ * Checks whether the archive contains a valid 0000.* segment.
+ *
+ * @param archiveName The name of the archive
+ * @return true if the archive is empty (no 0000.* segment)
+ * @throws IOException
+ */
+ private boolean isArchiveEmpty(String archiveName) throws IOException {
+ return awsContext.withDirectory(archiveName).listObjects("0000.").isEmpty();
+ }
+
+ @Override
+ public SegmentArchiveReader open(String archiveName) throws IOException {
+ AwsContext directoryContext = awsContext.withDirectory(archiveName);
+ if (!directoryContext.doesObjectExist("closed")) {
+ throw new IOException("The archive " + archiveName + " hasn't been closed correctly.");
+ }
+ return new AwsSegmentArchiveReader(directoryContext, archiveName, ioMonitor);
+ }
+
+ @Override
+ public SegmentArchiveReader forceOpen(String archiveName) throws IOException {
+ AwsContext directoryContext = awsContext.withDirectory(archiveName);
+ return new AwsSegmentArchiveReader(directoryContext, archiveName, ioMonitor);
+ }
+
+ @Override
+ public SegmentArchiveWriter create(String archiveName) throws IOException {
+ return new AwsSegmentArchiveWriter(awsContext.withDirectory(archiveName), archiveName, ioMonitor, monitor);
+ }
+
+ @Override
+ public boolean delete(String archiveName) {
+ return awsContext.withDirectory(archiveName).deleteAllObjects();
+ }
+
+ @Override
+ public boolean renameTo(String from, String to) {
+ try {
+ AwsContext fromContext = awsContext.withDirectory(from);
+ AwsContext toContext = awsContext.withDirectory(to);
+
+ for (S3ObjectSummary obj : fromContext.listObjects("")) {
+ toContext.copyObject(fromContext, obj.getKey());
+ }
+
+ fromContext.deleteAllObjects();
+ return true;
+ } catch (IOException e) {
+ log.error("Can't rename archive {} to {}", from, to, e);
+ return false;
+ }
+ }
+
+ @Override
+ public void copyFile(String from, String to) throws IOException {
+ AwsContext fromContext = awsContext.withDirectory(from);
+ fromContext.listObjects("").forEach(obj -> {
+ try {
+ awsContext.withDirectory(to).copyObject(fromContext, obj.getKey());
+ } catch (IOException e) {
+ log.error("Can't copy segment {}", obj.getKey(), e);
+ }
+ });
+ }
+
+ @Override
+ public boolean exists(String archiveName) {
+ try {
+ return awsContext.withDirectory(archiveName).listObjects("").size() > 0;
+ } catch (IOException e) {
+ log.error("Can't check the existence of {}", archiveName, e);
+ return false;
+ }
+ }
+
+ @Override
+ public void recoverEntries(String archiveName, LinkedHashMap<UUID, byte[]> entries) throws IOException {
+ Pattern pattern = Pattern.compile(SEGMENT_FILE_NAME_PATTERN);
+ List<RecoveredEntry> entryList = new ArrayList<>();
+
+ for (S3ObjectSummary b : awsContext.withDirectory(archiveName).listObjects("")) {
+ String name = Paths.get(b.getKey()).getFileName().toString();
+ Matcher m = pattern.matcher(name);
+ if (!m.matches()) {
+ continue;
+ }
+ int position = Integer.parseInt(m.group(1), 16);
+ UUID uuid = UUID.fromString(m.group(2));
+
+ byte[] data = awsContext.readObject(b.getKey());
+ entryList.add(new RecoveredEntry(position, uuid, data, name));
+ }
+
+ Collections.sort(entryList);
+
+ int i = 0;
+ for (RecoveredEntry e : entryList) {
+ if (e.position != i) {
+ log.warn("Missing entry {}.??? when recovering {}. No more segments will be read.",
+ String.format("%04X", i), archiveName);
+ break;
+ }
+ log.info("Recovering segment {}/{}", archiveName, e.fileName);
+ entries.put(e.uuid, e.data);
+ i++;
+ }
+ }
+
+ private static class RecoveredEntry implements Comparable<RecoveredEntry> {
+
+ private final byte[] data;
+
+ private final UUID uuid;
+
+ private final int position;
+
+ private final String fileName;
+
+ public RecoveredEntry(int position, UUID uuid, byte[] data, String fileName) {
+ this.data = data;
+ this.uuid = uuid;
+ this.position = position;
+ this.fileName = fileName;
+ }
+
+ @Override
+ public int compareTo(RecoveredEntry o) {
+ return Integer.compare(this.position, o.position);
+ }
+ }
+}
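A sketch of instantiating the manager outside OSGi; IOMonitorAdapter and FileStoreMonitorAdapter are assumed to be the no-op adapters from the oak-segment-tar SPI, and the class is assumed to sit in the same package as AwsContext:

    import java.io.IOException;

    import org.apache.jackrabbit.oak.segment.spi.monitor.FileStoreMonitorAdapter;
    import org.apache.jackrabbit.oak.segment.spi.monitor.IOMonitorAdapter;
    import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentArchiveManager;

    public class ArchiveListingExample {

        public static void list(AwsContext awsContext) throws IOException {
            SegmentArchiveManager manager = new AwsArchiveManager(
                    awsContext, new IOMonitorAdapter(), new FileStoreMonitorAdapter());
            for (String archive : manager.listArchives()) {
                System.out.println(archive); // e.g. "data00000a.tar"
            }
        }
    }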
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsBlobMetadata.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsBlobMetadata.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsBlobMetadata.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsBlobMetadata.java Thu May 14 11:11:12 2020
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+public final class AwsBlobMetadata {
+
+ private static final String METADATA_TYPE = "type";
+
+ private static final String METADATA_SEGMENT_UUID = "uuid";
+
+ private static final String METADATA_SEGMENT_POSITION = "position";
+
+ private static final String METADATA_SEGMENT_GENERATION = "generation";
+
+ private static final String METADATA_SEGMENT_FULL_GENERATION = "fullGeneration";
+
+ private static final String METADATA_SEGMENT_COMPACTED = "compacted";
+
+ private static final String TYPE_SEGMENT = "segment";
+
+ public static HashMap<String, String> toSegmentMetadata(AwsSegmentArchiveEntry indexEntry) {
+ HashMap<String, String> map = new HashMap<>();
+ map.put(METADATA_TYPE, TYPE_SEGMENT);
+ map.put(METADATA_SEGMENT_UUID, new UUID(indexEntry.getMsb(), indexEntry.getLsb()).toString());
+ map.put(METADATA_SEGMENT_POSITION, String.valueOf(indexEntry.getPosition()));
+ map.put(METADATA_SEGMENT_GENERATION, String.valueOf(indexEntry.getGeneration()));
+ map.put(METADATA_SEGMENT_FULL_GENERATION, String.valueOf(indexEntry.getFullGeneration()));
+ map.put(METADATA_SEGMENT_COMPACTED, String.valueOf(indexEntry.isCompacted()));
+ return map;
+ }
+
+ public static AwsSegmentArchiveEntry toIndexEntry(Map<String, String> metadata, int length) {
+ UUID uuid = UUID.fromString(metadata.get(METADATA_SEGMENT_UUID));
+ long msb = uuid.getMostSignificantBits();
+ long lsb = uuid.getLeastSignificantBits();
+ int position = Integer.parseInt(metadata.get(METADATA_SEGMENT_POSITION));
+ int generation = Integer.parseInt(metadata.get(METADATA_SEGMENT_GENERATION));
+ int fullGeneration = Integer.parseInt(metadata.get(METADATA_SEGMENT_FULL_GENERATION));
+ boolean compacted = Boolean.parseBoolean(metadata.get(METADATA_SEGMENT_COMPACTED));
+ return new AwsSegmentArchiveEntry(msb, lsb, position, length, generation, fullGeneration, compacted);
+ }
+
+ public static boolean isSegment(Map<String, String> metadata) {
+ return metadata != null && TYPE_SEGMENT.equals(metadata.get(METADATA_TYPE));
+ }
+}
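The two conversions above are inverses, except for the entry length, which travels with the blob rather than its metadata; a round-trip sketch (same-package placement assumed):

    import java.util.HashMap;
    import java.util.UUID;

    public class MetadataRoundTrip {

        public static AwsSegmentArchiveEntry roundTrip() {
            UUID uuid = UUID.randomUUID();
            int length = 262144; // supplied by the caller, not stored in the map
            AwsSegmentArchiveEntry entry = new AwsSegmentArchiveEntry(
                    uuid.getMostSignificantBits(), uuid.getLeastSignificantBits(),
                    0 /* position */, length,
                    1 /* generation */, 1 /* fullGeneration */, true /* compacted */);
            HashMap<String, String> s3UserMetadata = AwsBlobMetadata.toSegmentMetadata(entry);
            return AwsBlobMetadata.toIndexEntry(s3UserMetadata, length);
        }
    }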
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsContext.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsContext.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsContext.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsContext.java Thu May 14 11:11:12 2020
@@ -0,0 +1,390 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.auth.BasicSessionCredentials;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDBLockClientOptions;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDBLockClientOptions.AmazonDynamoDBLockClientOptionsBuilder;
+import com.amazonaws.services.dynamodbv2.document.DynamoDB;
+import com.amazonaws.services.dynamodbv2.document.Item;
+import com.amazonaws.services.dynamodbv2.document.ItemCollection;
+import com.amazonaws.services.dynamodbv2.document.PrimaryKey;
+import com.amazonaws.services.dynamodbv2.document.QueryOutcome;
+import com.amazonaws.services.dynamodbv2.document.Table;
+import com.amazonaws.services.dynamodbv2.document.TableWriteItems;
+import com.amazonaws.services.dynamodbv2.document.spec.QuerySpec;
+import com.amazonaws.services.dynamodbv2.document.utils.ValueMap;
+import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
+import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
+import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
+import com.amazonaws.services.dynamodbv2.model.KeyType;
+import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
+import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;
+import com.amazonaws.services.dynamodbv2.util.TableUtils;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.model.CopyObjectRequest;
+import com.amazonaws.services.s3.model.DeleteObjectsRequest;
+import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ListObjectsV2Request;
+import com.amazonaws.services.s3.model.ListObjectsV2Result;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+
+import org.apache.jackrabbit.oak.commons.Buffer;
+import org.apache.jackrabbit.oak.segment.spi.monitor.RemoteStoreMonitor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public final class AwsContext {
+
+ private static final Logger log = LoggerFactory.getLogger(AwsContext.class);
+
+ private static final String TABLE_ATTR_TIMESTAMP = "timestamp";
+
+ private static final String TABLE_ATTR_FILENAME = "filename";
+
+ public static final String TABLE_ATTR_CONTENT = "content";
+
+ private static final int TABLE_MAX_BATCH_WRITE_SIZE = 25;
+    // DynamoDB's BatchWriteItem accepts at most 25 items per request.
+
+ private static final String LOCKTABLE_KEY = "key";
+
+ private final AmazonS3 s3;
+ private final String bucketName;
+ private final String rootDirectory;
+
+ private final AmazonDynamoDB ddb;
+ private final Table journalTable;
+ private final String lockTableName;
+
+ private RemoteStoreMonitor remoteStoreMonitor;
+
+ private AwsContext(AmazonS3 s3, String bucketName, String rootDirectory, AmazonDynamoDB ddb,
+ String journalTableName, String lockTableName) {
+ this.s3 = s3;
+ this.bucketName = bucketName;
+ this.rootDirectory = rootDirectory.endsWith("/") ? rootDirectory : rootDirectory + "/";
+ this.ddb = ddb;
+ this.journalTable = new DynamoDB(ddb).getTable(journalTableName);
+ this.lockTableName = lockTableName;
+ }
+
+ /**
+ * Creates the context used to interact with AWS services.
+ *
+ * @param bucketName Name for the bucket that will store segments.
+ * @param rootDirectory The root directory under which the segment store is
+ * set up.
+ * @param journalTableName Name of the table used for storing journal and GC
+ * log entries. The table will be created if it
+ * doesn't already exist. It should have a partition key
+ * on "{@link #TABLE_ATTR_FILENAME}" and sort key on
+ * "{@link #TABLE_ATTR_TIMESTAMP}".
+ * @param lockTableName Name of table used for managing the distributed lock.
+ * The table will be created if it doesn't already
+ * exist. It should have a partition key on
+ * "{@link #LOCKTABLE_KEY}".
+ * @return The context.
+ * @throws IOException if the bucket or the tables cannot be created.
+ */
+ public static AwsContext create(String bucketName, String rootDirectory, String journalTableName,
+ String lockTableName) throws IOException {
+ AmazonS3 s3 = AmazonS3Client.builder().build();
+ AmazonDynamoDB ddb = AmazonDynamoDBClient.builder().build();
+ return create(s3, bucketName, rootDirectory, ddb, journalTableName, lockTableName);
+ }
+
+ /**
+ * Creates the context used to interact with AWS services.
+ *
+ * @param configuration The configuration used to initialize the context.
+ * @return The context.
+ * @throws IOException if the bucket or the tables cannot be created.
+ */
+ public static AwsContext create(Configuration configuration) throws IOException {
+ String region = configuration.region();
+ String rootDirectory = configuration.rootDirectory();
+ if (rootDirectory != null && rootDirectory.length() > 0 && rootDirectory.charAt(0) == '/') {
+ rootDirectory = rootDirectory.substring(1);
+ }
+
+ AWSCredentials credentials = configuration.sessionToken() == null || configuration.sessionToken().isEmpty()
+ ? new BasicAWSCredentials(configuration.accessKey(), configuration.secretKey())
+ : new BasicSessionCredentials(configuration.accessKey(), configuration.secretKey(),
+ configuration.sessionToken());
+ AWSStaticCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(credentials);
+
+ AmazonS3 s3 = AmazonS3ClientBuilder.standard().withCredentials(credentialsProvider).withRegion(region).build();
+ AmazonDynamoDB ddb = AmazonDynamoDBClientBuilder.standard().withCredentials(credentialsProvider)
+ .withRegion(region).build();
+ return create(s3, configuration.bucketName(), rootDirectory, ddb, configuration.journalTableName(),
+ configuration.lockTableName());
+ }
+
+ /**
+ * Creates the context used to interact with AWS services.
+ *
+ * @param s3 Client for accessing Amazon S3.
+ * @param bucketName Name for the bucket that will store segments.
+ * @param rootDirectory The root directory under which the segment store is
+ * set up.
+ * @param ddb Client for accessing Amazon DynamoDB.
+ * @param journalTableName Name of the table used for storing journal and GC
+ * log entries. The table will be created if it
+ * doesn't already exist. It should have a partition key
+ * on "{@link #TABLE_ATTR_FILENAME}" and sort key on
+ * "{@link #TABLE_ATTR_TIMESTAMP}".
+ * @param lockTableName Name of table used for managing the distributed lock.
+ * The table will be created if it doesn't already
+ * exist. It should have a partition key on
+ * "{@link #LOCKTABLE_KEY}".
+ * @return The context.
+ * @throws IOException if the bucket or the tables cannot be created.
+ */
+ public static AwsContext create(AmazonS3 s3, String bucketName, String rootDirectory, AmazonDynamoDB ddb,
+ String journalTableName, String lockTableName) throws IOException {
+ AwsContext awsContext = new AwsContext(s3, bucketName, rootDirectory, ddb, journalTableName, lockTableName);
+ try {
+ if (!s3.doesBucketExistV2(bucketName)) {
+ s3.createBucket(bucketName);
+ }
+
+ CreateTableRequest createJournalTableRequest = new CreateTableRequest().withTableName(journalTableName)
+ .withKeySchema(new KeySchemaElement(TABLE_ATTR_FILENAME, KeyType.HASH),
+ new KeySchemaElement(TABLE_ATTR_TIMESTAMP, KeyType.RANGE))
+ .withAttributeDefinitions(new AttributeDefinition(TABLE_ATTR_FILENAME, ScalarAttributeType.S),
+ new AttributeDefinition(TABLE_ATTR_TIMESTAMP, ScalarAttributeType.N))
+ .withProvisionedThroughput(new ProvisionedThroughput(1000L, 1500L));
+ TableUtils.createTableIfNotExists(ddb, createJournalTableRequest);
+
+ CreateTableRequest createLockTableRequest = new CreateTableRequest().withTableName(lockTableName)
+ .withKeySchema(new KeySchemaElement(LOCKTABLE_KEY, KeyType.HASH))
+ .withAttributeDefinitions(new AttributeDefinition(LOCKTABLE_KEY, ScalarAttributeType.S))
+ .withProvisionedThroughput(new ProvisionedThroughput(1000L, 1500L));
+ TableUtils.createTableIfNotExists(ddb, createLockTableRequest);
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+
+ return awsContext;
+ }
+
+ public AmazonDynamoDBLockClientOptionsBuilder getLockClientOptionsBuilder() {
+ return AmazonDynamoDBLockClientOptions.builder(ddb, lockTableName).withPartitionKeyName(LOCKTABLE_KEY);
+ }
+
+ public AwsContext withDirectory(String childDirectory) {
+ return new AwsContext(s3, bucketName, rootDirectory + childDirectory, ddb, journalTable.getTableName(),
+ lockTableName);
+ }
+
+ public String getConfig() {
+ StringBuilder uri = new StringBuilder("aws:");
+ uri.append(bucketName).append(';');
+ uri.append(rootDirectory).append(';');
+ uri.append(journalTable.getTableName()).append(';');
+ uri.append(lockTableName);
+ return uri.toString();
+ }
+
+ public String getPath() {
+ return rootDirectory;
+ }
+
+ public boolean doesObjectExist(String name) {
+ try {
+ return s3.doesObjectExist(bucketName, rootDirectory + name);
+ } catch (AmazonServiceException e) {
+ log.error("Can't check if the manifest exists", e);
+ return false;
+ }
+ }
+
+ public S3Object getObject(String name) throws IOException {
+ try {
+ GetObjectRequest request = new GetObjectRequest(bucketName, rootDirectory + name);
+ return s3.getObject(request);
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+ }
+
+ public ObjectMetadata getObjectMetadata(String key) {
+ return s3.getObjectMetadata(bucketName, key);
+ }
+
+ public Buffer readObjectToBuffer(String name, boolean offHeap) throws IOException {
+ byte[] data = readObject(rootDirectory + name);
+ Buffer buffer = offHeap ? Buffer.allocateDirect(data.length) : Buffer.allocate(data.length);
+ buffer.put(data);
+ buffer.flip();
+ return buffer;
+ }
+
+    public byte[] readObject(String key) throws IOException {
+        try (S3Object object = s3.getObject(bucketName, key)) {
+            int length = (int) object.getObjectMetadata().getContentLength();
+            byte[] data = new byte[length];
+            if (length > 0) {
+                try (InputStream stream = object.getObjectContent()) {
+                    // A single InputStream#read is not guaranteed to fill the
+                    // buffer, so keep reading until the content length is reached.
+                    int offset = 0;
+                    while (offset < length) {
+                        int count = stream.read(data, offset, length - offset);
+                        if (count < 0) {
+                            throw new IOException("Unexpected end of stream: " + key);
+                        }
+                        offset += count;
+                    }
+                }
+            }
+            return data;
+        } catch (AmazonServiceException e) {
+            throw new IOException(e);
+        }
+    }
+
+ public void writeObject(String name, byte[] data) throws IOException {
+ writeObject(name, data, new HashMap<>());
+ }
+
+    public void writeObject(String name, byte[] data, Map<String, String> userMetadata) throws IOException {
+        InputStream input = new ByteArrayInputStream(data);
+        ObjectMetadata metadata = new ObjectMetadata();
+        // Declare the length up front so the SDK can stream without buffering.
+        metadata.setContentLength(data.length);
+        metadata.setUserMetadata(userMetadata);
+        PutObjectRequest request = new PutObjectRequest(bucketName, rootDirectory + name, input, metadata);
+        try {
+            s3.putObject(request);
+        } catch (AmazonServiceException e) {
+            throw new IOException(e);
+        }
+    }
+
+ public void putObject(String name, InputStream input) throws IOException {
+ try {
+ PutObjectRequest request = new PutObjectRequest(bucketName, rootDirectory + name, input,
+ new ObjectMetadata());
+ s3.putObject(request);
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+ }
+
+ public void copyObject(AwsContext fromContext, String fromKey) throws IOException {
+ String toKey = rootDirectory + fromKey.substring(fromContext.rootDirectory.length());
+ try {
+ s3.copyObject(new CopyObjectRequest(bucketName, fromKey, bucketName, toKey));
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+ }
+
+ public boolean deleteAllObjects() {
+ try {
+ List<KeyVersion> keys = listObjects("").stream().map(i -> new KeyVersion(i.getKey()))
+ .collect(Collectors.toList());
+ DeleteObjectsRequest request = new DeleteObjectsRequest(bucketName).withKeys(keys);
+ s3.deleteObjects(request);
+ return true;
+ } catch (AmazonServiceException | IOException e) {
+ log.error("Can't delete objects from {}", rootDirectory, e);
+ return false;
+ }
+ }
+
+ public List<String> listPrefixes() throws IOException {
+ return listObjectsInternal("").getCommonPrefixes();
+ }
+
+ public List<S3ObjectSummary> listObjects(String prefix) throws IOException {
+ return listObjectsInternal(prefix).getObjectSummaries();
+ }
+
+ private ListObjectsV2Result listObjectsInternal(String prefix) throws IOException {
+ ListObjectsV2Request request = new ListObjectsV2Request().withBucketName(bucketName)
+ .withPrefix(rootDirectory + prefix).withDelimiter("/");
+ try {
+ return s3.listObjectsV2(request);
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+ }
+
+ public void deleteAllDocuments(String fileName) throws IOException {
+ List<PrimaryKey> primaryKeys = getDocumentsStream(fileName).map(item -> {
+ return new PrimaryKey(TABLE_ATTR_FILENAME, item.getString(TABLE_ATTR_FILENAME), TABLE_ATTR_TIMESTAMP,
+ item.getNumber(TABLE_ATTR_TIMESTAMP));
+ }).collect(Collectors.toList());
+
+ for (int i = 0; i < primaryKeys.size(); i += TABLE_MAX_BATCH_WRITE_SIZE) {
+ PrimaryKey[] currentKeys = new PrimaryKey[Math.min(TABLE_MAX_BATCH_WRITE_SIZE, primaryKeys.size() - i)];
+ for (int j = 0; j < currentKeys.length; j++) {
+ currentKeys[j] = primaryKeys.get(i + j);
+ }
+
+ new DynamoDB(ddb).batchWriteItem(
+ new TableWriteItems(journalTable.getTableName()).withPrimaryKeysToDelete(currentKeys));
+ }
+ }
+
+ public List<String> getDocumentContents(String fileName) throws IOException {
+ return getDocumentsStream(fileName).map(item -> item.getString(TABLE_ATTR_CONTENT))
+ .collect(Collectors.toList());
+ }
+
+    public Stream<Item> getDocumentsStream(String fileName) throws IOException {
+        final String filenameKey = ":v_filename";
+        QuerySpec spec = new QuerySpec().withScanIndexForward(false)
+                .withKeyConditionExpression(TABLE_ATTR_FILENAME + " = " + filenameKey)
+                .withValueMap(new ValueMap().withString(filenameKey, fileName));
+        try {
+            ItemCollection<QueryOutcome> outcome = journalTable.query(spec);
+            return StreamSupport.stream(outcome.spliterator(), false);
+        } catch (AmazonServiceException e) {
+            throw new IOException(e);
+        }
+    }
+
+    public void putDocument(String fileName, String line) throws IOException {
+        Item item = new Item().with(TABLE_ATTR_TIMESTAMP, new Date().getTime()).with(TABLE_ATTR_FILENAME, fileName)
+                .with(TABLE_ATTR_CONTENT, line);
+        try {
+            try {
+                // The sort key is a millisecond timestamp, so wait briefly to
+                // keep two consecutive writes from colliding on the same key.
+                Thread.sleep(1L);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+            }
+            journalTable.putItem(item);
+        } catch (AmazonServiceException e) {
+            throw new IOException(e);
+        }
+    }
+}
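
A short usage sketch for AwsContext (not part of this commit). It assumes the default AWS credential and region provider chains and placeholder resource names; create() provisions the bucket and both DynamoDB tables when missing.

    import java.nio.charset.StandardCharsets;
    import java.util.List;

    import com.amazonaws.services.dynamodbv2.AmazonDynamoDBLockClientOptions;

    import org.apache.jackrabbit.oak.segment.aws.AwsContext;

    public class AwsContextExample {

        public static void main(String[] args) throws Exception {
            // All resource names below are placeholders.
            AwsContext context = AwsContext.create(
                    "my-segmentstore-bucket", "oak", "oak-journal-table", "oak-lock-table");

            // S3 objects are written relative to the root directory ...
            context.writeObject("manifest.test", "hello".getBytes(StandardCharsets.UTF_8));
            // ... while readObject expects the full key, hence getPath().
            byte[] data = context.readObject(context.getPath() + "manifest.test");

            // Journal-style documents live in DynamoDB, returned newest first.
            context.putDocument("journal.log", "root record");
            List<String> lines = context.getDocumentContents("journal.log");
            System.out.println(lines.size() + " line(s), " + data.length + " bytes");

            // Options for the amazon-dynamodb-lock-client (distributed repository lock).
            AmazonDynamoDBLockClientOptions lockOptions = context.getLockClientOptionsBuilder().build();
        }
    }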
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFile.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFile.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFile.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsGCJournalFile.java Thu May 14 11:11:12 2020
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.jackrabbit.oak.segment.spi.persistence.GCJournalFile;
+
+public class AwsGCJournalFile implements GCJournalFile {
+
+ private final AwsAppendableFile file;
+
+ public AwsGCJournalFile(AwsContext awsContext, String fileName) {
+ this.file = new AwsAppendableFile(awsContext, fileName);
+ }
+
+ @Override
+ public void writeLine(String line) throws IOException {
+ file.openJournalWriter().writeLine(line);
+ }
+
+ @Override
+ public List<String> readLines() throws IOException {
+ return file.readLines();
+ }
+
+ @Override
+ public void truncate() throws IOException {
+ file.openJournalWriter().truncate();
+ }
+}
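
Usage sketch (not part of this commit), reusing an AwsContext created as above; the file name is a placeholder. Each written line becomes one document in the journal table.

    import java.util.List;

    import org.apache.jackrabbit.oak.segment.aws.AwsContext;
    import org.apache.jackrabbit.oak.segment.aws.AwsGCJournalFile;

    public class GcJournalExample {

        public static void main(String[] args) throws Exception {
            AwsContext context = AwsContext.create(
                    "my-segmentstore-bucket", "oak", "oak-journal-table", "oak-lock-table");

            AwsGCJournalFile gcJournal = new AwsGCJournalFile(context, "gc.log");
            gcJournal.writeLine("compacted to generation 2"); // appends one document
            List<String> history = gcJournal.readLines();     // every retained line
            System.out.println(history);
            gcJournal.truncate();                             // clears the file
        }
    }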
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFile.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFile.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFile.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsJournalFile.java Thu May 14 11:11:12 2020
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFile;
+import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileReader;
+import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileWriter;
+
+public class AwsJournalFile implements JournalFile {
+
+ private final AwsAppendableFile file;
+
+ public AwsJournalFile(AwsContext awsContext, String fileName) {
+ this.file = new AwsAppendableFile(awsContext, fileName);
+ }
+
+ @Override
+ public JournalFileReader openJournalReader() throws IOException {
+ return file.openJournalReader();
+ }
+
+ @Override
+ public JournalFileWriter openJournalWriter() throws IOException {
+ return file.openJournalWriter();
+ }
+
+ @Override
+ public String getName() {
+ return file.getName();
+ }
+
+ @Override
+ public boolean exists() {
+ return file.exists();
+ }
+}
\ No newline at end of file
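
Usage sketch (not part of this commit); it assumes the SPI's JournalFileReader and JournalFileWriter are Closeable, as in oak-segment-tar, and that readLine() returns null once the journal is exhausted.

    import org.apache.jackrabbit.oak.segment.aws.AwsContext;
    import org.apache.jackrabbit.oak.segment.aws.AwsJournalFile;
    import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileReader;
    import org.apache.jackrabbit.oak.segment.spi.persistence.JournalFileWriter;

    public class JournalExample {

        public static void main(String[] args) throws Exception {
            AwsContext context = AwsContext.create(
                    "my-segmentstore-bucket", "oak", "oak-journal-table", "oak-lock-table");

            AwsJournalFile journal = new AwsJournalFile(context, "journal.log");
            try (JournalFileWriter writer = journal.openJournalWriter()) {
                writer.writeLine("head record");
            }
            try (JournalFileReader reader = journal.openJournalReader()) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line); // newest entries come back first
                }
            }
        }
    }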
Added: jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFile.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFile.java?rev=1877731&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFile.java (added)
+++ jackrabbit/oak/trunk/oak-segment-aws/src/main/java/org/apache/jackrabbit/oak/segment/aws/AwsManifestFile.java Thu May 14 11:11:12 2020
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.segment.aws;
+
+import java.io.IOException;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.Properties;
+
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.services.s3.model.S3Object;
+
+import org.apache.jackrabbit.oak.segment.spi.persistence.ManifestFile;
+
+public class AwsManifestFile implements ManifestFile {
+
+ private final AwsContext awsContext;
+ private final String manifestFile;
+
+ public AwsManifestFile(AwsContext awsContext, String manifestFile) throws IOException {
+ this.awsContext = awsContext;
+ this.manifestFile = manifestFile;
+ }
+
+ @Override
+ public boolean exists() {
+ return awsContext.doesObjectExist(manifestFile);
+ }
+
+ @Override
+ public Properties load() throws IOException {
+ Properties properties = new Properties();
+ if (this.exists()) {
+ try (S3Object object = awsContext.getObject(manifestFile)) {
+ properties.load(object.getObjectContent());
+ } catch (AmazonServiceException e) {
+ throw new IOException(e);
+ }
+ }
+ return properties;
+ }
+
+ @Override
+ public void save(Properties properties) throws IOException {
+        // Piped streams assume a reader on another thread; buffering the
+        // manifest in memory avoids a deadlock once it outgrows the pipe.
+        ByteArrayOutputStream output = new ByteArrayOutputStream();
+        properties.store(output, null);
+        awsContext.putObject(manifestFile, new ByteArrayInputStream(output.toByteArray()));
+    }
+}
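
Usage sketch (not part of this commit): load() returns an empty Properties when the manifest object does not yet exist, and save() writes it back through putObject.

    import java.util.Properties;

    import org.apache.jackrabbit.oak.segment.aws.AwsContext;
    import org.apache.jackrabbit.oak.segment.aws.AwsManifestFile;

    public class ManifestExample {

        public static void main(String[] args) throws Exception {
            AwsContext context = AwsContext.create(
                    "my-segmentstore-bucket", "oak", "oak-journal-table", "oak-lock-table");

            AwsManifestFile manifest = new AwsManifestFile(context, "manifest");
            Properties props = manifest.load();   // empty if the object is missing
            props.setProperty("store.version", "1");
            manifest.save(props);                 // persisted as an S3 object
        }
    }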