You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airavata.apache.org by di...@apache.org on 2022/03/14 17:23:29 UTC
[airavata-mft] branch master updated: Fixing chunked transfer logic bugs and transfer client implementation
This is an automated email from the ASF dual-hosted git repository.
dimuthuupe pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airavata-mft.git
The following commit(s) were added to refs/heads/master by this push:
new d8b196d Fixing chunked transfer logic bugs and transfer client implementation
d8b196d is described below
commit d8b196dee85055017ea83ae47463da4f6daf1085
Author: Dimuthu Wannipurage <di...@gmail.com>
AuthorDate: Mon Mar 14 13:23:03 2022 -0400
Fixing chunked transfer logic bugs and transfer client implementation
---
.../airavata/mft/agent/TransportMediator.java | 2 +
.../airavata/mft/command/line/MainRunner.java | 3 +-
.../mft/command/line/sub/s3/S3SubCommand.java | 4 +-
.../sub/s3/{ => secret}/S3SecretSubCommand.java | 2 +-
.../S3StorageAddSubCommand.java} | 66 ++++-----
.../line/sub/s3/storage/S3StorageSubCommand.java | 34 +++++
.../sub/transfer/SubmitTransferSubCommand.java | 77 +++++++++++
.../line/sub/transfer/TransferSubCommand.java | 8 ++
.../airavata/mft/core/ConnectorResolver.java | 6 +-
.../server/backend/sql/SQLResourceBackend.java | 147 +++++++++++----------
.../src/main/resources/applicationContext.xml | 3 +-
.../mft/transport/s3/S3IncomingConnector.java | 4 +-
.../mft/transport/s3/S3MetadataCollector.java | 8 +-
.../mft/transport/s3/S3OutgoingConnector.java | 8 +-
14 files changed, 256 insertions(+), 116 deletions(-)
diff --git a/agent/src/main/java/org/apache/airavata/mft/agent/TransportMediator.java b/agent/src/main/java/org/apache/airavata/mft/agent/TransportMediator.java
index b891b05..b51c9b1 100644
--- a/agent/src/main/java/org/apache/airavata/mft/agent/TransportMediator.java
+++ b/agent/src/main/java/org/apache/airavata/mft/agent/TransportMediator.java
@@ -126,6 +126,8 @@ public class TransportMediator {
Future<Integer> future = completionService.take();
}
+ inConnector.complete();
+ outConnector.complete();
logger.info("Completed chunked transfer for transfer {}", transferId);
} else if (inStreamingConnectorOp.isPresent() && outStreamingConnectorOp.isPresent()) {
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/MainRunner.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/MainRunner.java
index 8855dc2..e6bda28 100644
--- a/command-line/src/main/java/org/apache/airavata/mft/command/line/MainRunner.java
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/MainRunner.java
@@ -1,12 +1,13 @@
package org.apache.airavata.mft.command.line;
import org.apache.airavata.mft.command.line.sub.s3.S3SubCommand;
+import org.apache.airavata.mft.command.line.sub.transfer.TransferSubCommand;
import picocli.CommandLine;
import picocli.CommandLine.Command;
@Command(name = "checksum", mixinStandardHelpOptions = true, version = "checksum 4.0",
description = "Prints the checksum (SHA-256 by default) of a file to STDOUT.",
- subcommands = {S3SubCommand.class})
+ subcommands = {S3SubCommand.class, TransferSubCommand.class})
class MainRunner {
public static void main(String... args) {
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SubCommand.java
index abb0976..d774c30 100644
--- a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SubCommand.java
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SubCommand.java
@@ -1,9 +1,11 @@
package org.apache.airavata.mft.command.line.sub.s3;
+import org.apache.airavata.mft.command.line.sub.s3.secret.S3SecretSubCommand;
+import org.apache.airavata.mft.command.line.sub.s3.storage.S3StorageSubCommand;
import picocli.CommandLine;
@CommandLine.Command(name = "s3", description = "Manage S3 resources and credentials",
- subcommands = {S3ResourceSubCommand.class, S3SecretSubCommand.class})
+ subcommands = {S3StorageSubCommand.class, S3SecretSubCommand.class})
public class S3SubCommand {
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SecretSubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/secret/S3SecretSubCommand.java
similarity index 92%
rename from command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SecretSubCommand.java
rename to command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/secret/S3SecretSubCommand.java
index 57add4f..175ff76 100644
--- a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3SecretSubCommand.java
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/secret/S3SecretSubCommand.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.mft.command.line.sub.s3;
+package org.apache.airavata.mft.command.line.sub.s3.secret;
import picocli.CommandLine;
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3ResourceSubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageAddSubCommand.java
similarity index 54%
rename from command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3ResourceSubCommand.java
rename to command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageAddSubCommand.java
index 85b17dc..1bc1815 100644
--- a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/S3ResourceSubCommand.java
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageAddSubCommand.java
@@ -1,4 +1,4 @@
-package org.apache.airavata.mft.command.line.sub.s3;
+package org.apache.airavata.mft.command.line.sub.s3.storage;
import org.apache.airavata.mft.api.client.MFTApiClient;
import org.apache.airavata.mft.common.AuthToken;
@@ -7,29 +7,39 @@ import org.apache.airavata.mft.credential.stubs.s3.S3SecretCreateRequest;
import org.apache.airavata.mft.resource.service.s3.S3StorageServiceGrpc;
import org.apache.airavata.mft.resource.stubs.s3.storage.S3Storage;
import org.apache.airavata.mft.resource.stubs.s3.storage.S3StorageCreateRequest;
-import org.apache.airavata.mft.resource.stubs.s3.storage.S3StorageListRequest;
-import org.apache.airavata.mft.resource.stubs.s3.storage.S3StorageListResponse;
import org.apache.airavata.mft.storage.stubs.storagesecret.StorageSecret;
import org.apache.airavata.mft.storage.stubs.storagesecret.StorageSecretCreateRequest;
import org.apache.airavata.mft.storage.stubs.storagesecret.StorageSecretServiceGrpc;
import picocli.CommandLine;
-@CommandLine.Command(name = "remote")
-public class S3ResourceSubCommand {
+import java.util.concurrent.Callable;
- @CommandLine.Command(name = "add")
- void addS3Resource() {
+@CommandLine.Command(name = "add")
+public class S3StorageAddSubCommand implements Callable<Integer> {
- AuthToken authToken = AuthToken.newBuilder().build();
+ @CommandLine.Option(names = {"-n", "--name"}, description = "Storage Name")
+ private String remoteName;
+
+ @CommandLine.Option(names = {"-b", "--bucket"}, description = "Bucket Name")
+ private String bucket;
+
+ @CommandLine.Option(names = {"-r", "--region"}, description = "Region")
+ private String region;
+
+ @CommandLine.Option(names = {"-e", "--endpoint"}, description = "S3 API Endpoint")
+ private String endpoint;
+
+ @CommandLine.Option(names = {"-k", "--key"}, description = "Access Key")
+ private String accessKey;
- String remoteName = System.console().readLine("Remote Name: ");
- String bucket = System.console().readLine("Bucket: ");
- String region = System.console().readLine("Region: ");
- String endpoint = System.console().readLine("S3 Endpoint: ");
- String useTLS = System.console().readLine("Use TLS [Y/n]: ");
+ @CommandLine.Option(names = {"-s", "--secret"}, description = "Access Secret")
+ private String accessSecret;
+
+ @Override
+ public Integer call() throws Exception {
+
+ AuthToken authToken = AuthToken.newBuilder().build();
- String accessKey = System.console().readLine("Access Key: ");
- String accessSecret = System.console().readLine("Access Secret: ");
System.out.println("Adding S3 Secret");
MFTApiClient mftApiClient = MFTApiClient.MFTApiClientBuilder.newBuilder().build();
@@ -47,7 +57,6 @@ public class S3ResourceSubCommand {
.setName(remoteName)
.setEndpoint(endpoint)
.setBucketName(bucket)
- .setUseTLS("Y".equals(useTLS))
.setRegion(region).build());
@@ -58,30 +67,11 @@ public class S3ResourceSubCommand {
.setSecretId(s3Secret.getSecretId())
.setType(StorageSecret.StorageType.S3).build());
- System.out.println("Created the storage secret " + storageSecret.getId());
- }
-
- @CommandLine.Command(name = "delete")
- void deleteS3Resource(@CommandLine.Parameters(index = "0") String resourceId) {
- System.out.println("Deleting S3 Resource " + resourceId);
- }
-
- @CommandLine.Command(name = "list")
- void listS3Resource() {
- System.out.println("Listing S3 Resource");
- MFTApiClient mftApiClient = MFTApiClient.MFTApiClientBuilder.newBuilder().build();
-
- S3StorageListResponse s3StorageListResponse = mftApiClient.getStorageServiceClient().s3()
- .listS3Storage(S3StorageListRequest.newBuilder().setOffset(0).setLimit(10).build());
- s3StorageListResponse.getStoragesList().forEach(s -> {
- System.out.println("Storage Id : " + s.getStorageId() + " Name : " + s.getName()+ " Bucket : " + s.getBucketName());
- });
- }
+ System.out.println("Created the storage secret " + storageSecret.getId());
- @CommandLine.Command(name = "get")
- void getS3Resource(@CommandLine.Parameters(index = "0") String resourceId) {
- System.out.println("Getting S3 Resource " + resourceId);
+ System.out.println("Added storage " + s3Storage.getStorageId());
+ return 0;
}
}
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageSubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageSubCommand.java
new file mode 100644
index 0000000..87a7e2d
--- /dev/null
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/s3/storage/S3StorageSubCommand.java
@@ -0,0 +1,34 @@
+package org.apache.airavata.mft.command.line.sub.s3.storage;
+
+import org.apache.airavata.mft.api.client.MFTApiClient;
+import org.apache.airavata.mft.command.line.sub.s3.storage.S3StorageAddSubCommand;
+import org.apache.airavata.mft.resource.stubs.s3.storage.S3StorageListRequest;
+import org.apache.airavata.mft.resource.stubs.s3.storage.S3StorageListResponse;
+import picocli.CommandLine;
+
+@CommandLine.Command(name = "remote", subcommands = {S3StorageAddSubCommand.class})
+public class S3StorageSubCommand {
+
+ @CommandLine.Command(name = "delete")
+ void deleteS3Resource(@CommandLine.Parameters(index = "0") String resourceId) {
+ System.out.println("Deleting S3 Resource " + resourceId);
+ }
+
+ @CommandLine.Command(name = "list")
+ void listS3Resource() {
+ System.out.println("Listing S3 Resource");
+ MFTApiClient mftApiClient = MFTApiClient.MFTApiClientBuilder.newBuilder().build();
+
+ S3StorageListResponse s3StorageListResponse = mftApiClient.getStorageServiceClient().s3()
+ .listS3Storage(S3StorageListRequest.newBuilder().setOffset(0).setLimit(10).build());
+
+ s3StorageListResponse.getStoragesList().forEach(s -> {
+ System.out.println("Storage Id : " + s.getStorageId() + " Name : " + s.getName()+ " Bucket : " + s.getBucketName());
+ });
+ }
+
+ @CommandLine.Command(name = "get")
+ void getS3Resource(@CommandLine.Parameters(index = "0") String resourceId) {
+ System.out.println("Getting S3 Resource " + resourceId);
+ }
+}
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/SubmitTransferSubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/SubmitTransferSubCommand.java
new file mode 100644
index 0000000..db0cf54
--- /dev/null
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/SubmitTransferSubCommand.java
@@ -0,0 +1,77 @@
+package org.apache.airavata.mft.command.line.sub.transfer;
+
+import org.apache.airavata.mft.api.client.MFTApiClient;
+import org.apache.airavata.mft.api.service.TransferApiRequest;
+import org.apache.airavata.mft.api.service.TransferApiResponse;
+import org.apache.airavata.mft.common.AuthToken;
+import org.apache.airavata.mft.resource.stubs.common.FileResource;
+import org.apache.airavata.mft.resource.stubs.common.GenericResource;
+import org.apache.airavata.mft.resource.stubs.common.GenericResourceCreateRequest;
+import org.apache.airavata.mft.storage.stubs.storagesecret.StorageSecretSearchRequest;
+import org.apache.airavata.mft.storage.stubs.storagesecret.StorageSecretSearchResponse;
+import picocli.CommandLine;
+
+import java.util.concurrent.Callable;
+
+@CommandLine.Command(name = "submit", description = "Submit a data transfer job")
+public class SubmitTransferSubCommand implements Callable<Integer> {
+
+ @CommandLine.Option(names = {"-st", "--source-type"}, description = "S3, SCP, LOCAL, FTP ...")
+ private String sourceType;
+
+ @CommandLine.Option(names = {"-dt", "--destination-type"}, description = "S3, SCP, LOCAL, FTP ...")
+ private String destinationType;
+
+ @CommandLine.Option(names = {"-s", "--source"}, description = "Source Storage Id")
+ private String sourceStorageId;
+
+ @CommandLine.Option(names = {"-d", "--destination"}, description = "Destination Storage Id")
+ private String destinationStorageId;
+
+ @CommandLine.Option(names = {"-sp", "--source-path"}, description = "Source Path")
+ private String sourcePath;
+
+ @CommandLine.Option(names = {"-dp", "--destination-path"}, description = "Destination Path")
+ private String destinationPath;
+
+
+ @Override
+ public Integer call() throws Exception {
+ System.out.println("Transferring data from " + sourceStorageId + " to " + destinationStorageId);
+ MFTApiClient mftApiClient = MFTApiClient.MFTApiClientBuilder.newBuilder().build();
+
+ AuthToken token = AuthToken.newBuilder().build();
+
+ StorageSecretSearchResponse sourceSecret = mftApiClient.getStorageServiceClient().storageSecret()
+ .searchStorageSecret(StorageSecretSearchRequest.newBuilder()
+ .setAuthzToken(token).setStorageId(sourceStorageId).build());
+ System.out.println(sourceSecret);
+
+ StorageSecretSearchResponse destSecret = mftApiClient.getStorageServiceClient().storageSecret()
+ .searchStorageSecret(StorageSecretSearchRequest.newBuilder()
+ .setAuthzToken(token).setStorageId(destinationStorageId).build());
+
+ GenericResource sourceResource = mftApiClient.getResourceClient().createGenericResource(GenericResourceCreateRequest.newBuilder()
+ .setAuthzToken(token)
+ .setFile(FileResource.newBuilder().setResourcePath(sourcePath).build())
+ .setStorageId(sourceStorageId)
+ .setStorageType(GenericResourceCreateRequest.StorageType.valueOf(sourceType)).build());
+
+ GenericResource destResource = mftApiClient.getResourceClient().createGenericResource(GenericResourceCreateRequest.newBuilder()
+ .setAuthzToken(token)
+ .setFile(FileResource.newBuilder().setResourcePath(destinationPath).build())
+ .setStorageId(destinationStorageId)
+ .setStorageType(GenericResourceCreateRequest.StorageType.valueOf(destinationType)).build());
+
+ TransferApiResponse transferResp = mftApiClient.getTransferClient().submitTransfer(TransferApiRequest.newBuilder()
+ .setSourceToken(sourceSecret.getStorageSecret().getSecretId())
+ .setDestinationToken(destSecret.getStorageSecret().getSecretId())
+ .setDestinationResourceId(destResource.getResourceId())
+ .setSourceResourceId(sourceResource.getResourceId())
+ .setSourceType(sourceType)
+ .setDestinationType(destinationType).build());
+
+ System.out.println("Submitted Transfer " + transferResp.getTransferId());
+ return 0;
+ }
+}
diff --git a/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/TransferSubCommand.java b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/TransferSubCommand.java
new file mode 100644
index 0000000..e3b0a4f
--- /dev/null
+++ b/command-line/src/main/java/org/apache/airavata/mft/command/line/sub/transfer/TransferSubCommand.java
@@ -0,0 +1,8 @@
+package org.apache.airavata.mft.command.line.sub.transfer;
+
+import picocli.CommandLine;
+
+@CommandLine.Command(name = "transfer", description = "Data transfer operations",
+ subcommands = {SubmitTransferSubCommand.class})
+public class TransferSubCommand {
+}
diff --git a/core/src/main/java/org/apache/airavata/mft/core/ConnectorResolver.java b/core/src/main/java/org/apache/airavata/mft/core/ConnectorResolver.java
index 7a76bcd..f061a31 100644
--- a/core/src/main/java/org/apache/airavata/mft/core/ConnectorResolver.java
+++ b/core/src/main/java/org/apache/airavata/mft/core/ConnectorResolver.java
@@ -33,6 +33,9 @@ public final class ConnectorResolver {
case "SCP":
className = "org.apache.airavata.mft.transport.scp.SCPIncomingConnector";
break;
+ case "S3":
+ className = "org.apache.airavata.mft.transport.s3.S3IncomingConnector";
+ break;
}
if (className != null) {
@@ -50,9 +53,6 @@ public final class ConnectorResolver {
case "SCP":
className = "org.apache.airavata.mft.transport.scp.SCPOutgoingConnector";
break;
- case "S3":
- className = "org.apache.airavata.mft.transport.s3.S3IncomingConnector";
- break;
}
if (className != null) {
diff --git a/services/resource-service/server/src/main/java/org/apache/airavata/mft/resource/server/backend/sql/SQLResourceBackend.java b/services/resource-service/server/src/main/java/org/apache/airavata/mft/resource/server/backend/sql/SQLResourceBackend.java
index a3d3cd8..133c235 100644
--- a/services/resource-service/server/src/main/java/org/apache/airavata/mft/resource/server/backend/sql/SQLResourceBackend.java
+++ b/services/resource-service/server/src/main/java/org/apache/airavata/mft/resource/server/backend/sql/SQLResourceBackend.java
@@ -74,75 +74,79 @@ public class SQLResourceBackend implements ResourceBackend {
logger.info("Destroying database resource backend");
}
+
+ private GenericResource convertGenericResourceEntity(GenericResourceEntity resourceEty) throws Exception {
+ GenericResource.Builder builder = GenericResource.newBuilder();
+ builder.setResourceId(resourceEty.getResourceId());
+ switch (resourceEty.getResourceType()){
+ case DIRECTORY:
+ builder.setDirectory(DirectoryResource.newBuilder().setResourcePath(resourceEty.getResourcePath()).build());
+ break;
+ case FILE:
+ builder.setFile(FileResource.newBuilder().setResourcePath(resourceEty.getResourcePath()).build());
+ break;
+ }
+
+ switch (resourceEty.getStorageType()) {
+ case S3:
+ Optional<S3Storage> s3Storage = getS3Storage(S3StorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setS3Storage(s3Storage.orElseThrow(() -> new Exception("Could not find a S3 storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case SCP:
+ Optional<SCPStorage> scpStorage = getSCPStorage(SCPStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setScpStorage(scpStorage.orElseThrow(() -> new Exception("Could not find a SCP storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case LOCAL:
+ Optional<LocalStorage> localStorage = getLocalStorage(LocalStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setLocalStorage(localStorage.orElseThrow(() -> new Exception("Could not find a Local storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case FTP:
+ Optional<FTPStorage> ftpStorage = getFTPStorage(FTPStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setFtpStorage(ftpStorage.orElseThrow(() -> new Exception("Could not find a FTP storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case BOX:
+ Optional<BoxStorage> boxStorage = getBoxStorage(BoxStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setBoxStorage(boxStorage.orElseThrow(() -> new Exception("Could not find a Box storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case DROPBOX:
+ Optional<DropboxStorage> dropBoxStorage = getDropboxStorage(DropboxStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setDropboxStorage(dropBoxStorage.orElseThrow(() -> new Exception("Could not find a Dropbox storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case GCS:
+ Optional<GCSStorage> gcsStorage = getGCSStorage(GCSStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setGcsStorage(gcsStorage.orElseThrow(() -> new Exception("Could not find a GCS storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ case AZURE:
+ Optional<AzureStorage> azureStorage = getAzureStorage(AzureStorageGetRequest.newBuilder()
+ .setStorageId(resourceEty.getStorageId()).build());
+ builder.setAzureStorage(azureStorage.orElseThrow(() -> new Exception("Could not find a Azure storage with id "
+ + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
+ break;
+ }
+
+ return builder.build();
+ }
@Override
public Optional<GenericResource> getGenericResource(GenericResourceGetRequest request) throws Exception {
Optional<GenericResourceEntity> resourceEtyOp = resourceRepository.findByResourceId(request.getResourceId());
if (resourceEtyOp.isPresent()) {
GenericResourceEntity resourceEty = resourceEtyOp.get();
- GenericResource.Builder builder = GenericResource.newBuilder();
- builder.setResourceId(resourceEty.getResourceId());
- switch (resourceEty.getResourceType()){
- case DIRECTORY:
- builder.setDirectory(DirectoryResource.newBuilder().setResourcePath(resourceEty.getResourcePath()).build());
- break;
- case FILE:
- builder.setFile(FileResource.newBuilder().setResourcePath(resourceEty.getResourcePath()).build());
- break;
- }
-
- switch (resourceEty.getStorageType()) {
- case S3:
- Optional<S3Storage> s3Storage = getS3Storage(S3StorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setS3Storage(s3Storage.orElseThrow(() -> new Exception("Could not find a S3 storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case SCP:
- Optional<SCPStorage> scpStorage = getSCPStorage(SCPStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setScpStorage(scpStorage.orElseThrow(() -> new Exception("Could not find a SCP storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case LOCAL:
- Optional<LocalStorage> localStorage = getLocalStorage(LocalStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setLocalStorage(localStorage.orElseThrow(() -> new Exception("Could not find a Local storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case FTP:
- Optional<FTPStorage> ftpStorage = getFTPStorage(FTPStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setFtpStorage(ftpStorage.orElseThrow(() -> new Exception("Could not find a FTP storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case BOX:
- Optional<BoxStorage> boxStorage = getBoxStorage(BoxStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setBoxStorage(boxStorage.orElseThrow(() -> new Exception("Could not find a Box storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case DROPBOX:
- Optional<DropboxStorage> dropBoxStorage = getDropboxStorage(DropboxStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setDropboxStorage(dropBoxStorage.orElseThrow(() -> new Exception("Could not find a Dropbox storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case GCS:
- Optional<GCSStorage> gcsStorage = getGCSStorage(GCSStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setGcsStorage(gcsStorage.orElseThrow(() -> new Exception("Could not find a GCS storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- case AZURE:
- Optional<AzureStorage> azureStorage = getAzureStorage(AzureStorageGetRequest.newBuilder()
- .setStorageId(resourceEty.getStorageId()).build());
- builder.setAzureStorage(azureStorage.orElseThrow(() -> new Exception("Could not find a Azure storage with id "
- + resourceEty.getStorageId() + " for resource " + resourceEty.getResourceId())));
- break;
- }
-
- return Optional.of(builder.build());
+ return Optional.of(convertGenericResourceEntity(resourceEty));
} else {
return Optional.empty();
}
@@ -157,13 +161,18 @@ public class SQLResourceBackend implements ResourceBackend {
switch (request.getResourceCase()) {
case FILE:
entity.setResourcePath(request.getFile().getResourcePath());
+ entity.setResourceType(GenericResourceEntity.ResourceType.FILE);
break;
case DIRECTORY:
entity.setResourcePath(request.getDirectory().getResourcePath());
+ entity.setResourceType(GenericResourceEntity.ResourceType.DIRECTORY);
break;
}
- throw new UnsupportedOperationException("Operation is not supported in backend");
+ entity.setStorageType(GenericResourceEntity.StorageType.valueOf(request.getStorageType().name()));
+
+ GenericResourceEntity saved = resourceRepository.save(entity);
+ return convertGenericResourceEntity(saved);
}
@Override
@@ -197,13 +206,17 @@ public class SQLResourceBackend implements ResourceBackend {
@Override
public boolean deleteStorageSecret(StorageSecretDeleteRequest request) throws Exception {
resourceSecretRepository.deleteById(request.getId());
- return false;
+ return true;
}
@Override
public Optional<StorageSecret> searchStorageSecret(StorageSecretSearchRequest request) throws Exception {
- //resourceSecretRepository.findByStorageId();
- return Optional.empty();
+ Optional<StorageSecretEntity> ety = resourceSecretRepository.findByStorageId(request.getStorageId());
+ if (ety.isPresent()) {
+ return Optional.of(mapper.map(ety.get(), StorageSecret.newBuilder().getClass()).build());
+ } else {
+ return Optional.empty();
+ }
}
@Override
diff --git a/services/resource-service/server/src/main/resources/applicationContext.xml b/services/resource-service/server/src/main/resources/applicationContext.xml
index ea845a3..3ea3c25 100644
--- a/services/resource-service/server/src/main/resources/applicationContext.xml
+++ b/services/resource-service/server/src/main/resources/applicationContext.xml
@@ -6,7 +6,6 @@
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd">
- <bean id="resourceBackend" class="org.apache.airavata.mft.resource.server.backend.file.FileBasedResourceBackend"
+ <bean id="resourceBackend" class="org.apache.airavata.mft.resource.server.backend.sql.SQLResourceBackend"
init-method="init" destroy-method="destroy"></bean>
-
</beans>
\ No newline at end of file
diff --git a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3IncomingConnector.java b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3IncomingConnector.java
index 03b745b..be2c9ee 100644
--- a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3IncomingConnector.java
+++ b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3IncomingConnector.java
@@ -2,6 +2,7 @@ package org.apache.airavata.mft.transport.s3;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
@@ -61,8 +62,9 @@ public class S3IncomingConnector implements IncomingChunkedConnector, IncomingSt
BasicAWSCredentials awsCreds = new BasicAWSCredentials(s3Secret.getAccessKey(), s3Secret.getSecretKey());
s3Client = AmazonS3ClientBuilder.standard()
+ .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
+ s3Storage.getEndpoint(), s3Storage.getRegion()))
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
- .withRegion(s3Storage.getRegion())
.build();
}
}
diff --git a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3MetadataCollector.java b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3MetadataCollector.java
index 5c85195..d512a07 100644
--- a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3MetadataCollector.java
+++ b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3MetadataCollector.java
@@ -19,6 +19,7 @@ package org.apache.airavata.mft.transport.s3;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
@@ -75,8 +76,10 @@ public class S3MetadataCollector implements MetadataCollector {
BasicAWSCredentials awsCreds = new BasicAWSCredentials(s3Secret.getAccessKey(), s3Secret.getSecretKey());
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
+ .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
+ s3Resource.getS3Storage().getEndpoint(),
+ s3Resource.getS3Storage().getRegion()))
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
- .withRegion(s3Resource.getS3Storage().getRegion())
.build();
FileResourceMetadata metadata = new FileResourceMetadata();
@@ -135,6 +138,9 @@ public class S3MetadataCollector implements MetadataCollector {
BasicAWSCredentials awsCreds = new BasicAWSCredentials(s3Secret.getAccessKey(), s3Secret.getSecretKey());
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
+ .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
+ s3Resource.getS3Storage().getEndpoint(),
+ s3Resource.getS3Storage().getRegion()))
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
.withRegion(s3Resource.getS3Storage().getRegion())
.build();
diff --git a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3OutgoingConnector.java b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3OutgoingConnector.java
index 3e95383..c7be388 100644
--- a/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3OutgoingConnector.java
+++ b/transport/s3-transport/src/main/java/org/apache/airavata/mft/transport/s3/S3OutgoingConnector.java
@@ -2,6 +2,7 @@ package org.apache.airavata.mft.transport.s3;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.*;
@@ -64,14 +65,17 @@ public class S3OutgoingConnector implements OutgoingChunkedConnector {
BasicAWSCredentials awsCreds = new BasicAWSCredentials(s3Secret.getAccessKey(), s3Secret.getSecretKey());
s3Client = AmazonS3ClientBuilder.standard()
+ .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
+ s3Storage.getEndpoint(), s3Storage.getRegion()))
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
- .withRegion(s3Storage.getRegion())
.build();
}
InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(resource.getS3Storage().getBucketName(),
resource.getFile().getResourcePath());
initResponse = s3Client.initiateMultipartUpload(initRequest);
+ logger.info("Initialized multipart upload for file {} in bucket {}",
+ resource.getFile().getResourcePath(), resource.getS3Storage().getBucketName());
}
@Override
@@ -97,5 +101,7 @@ public class S3OutgoingConnector implements OutgoingChunkedConnector {
CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(resource.getS3Storage().getBucketName(),
resource.getFile().getResourcePath(), initResponse.getUploadId(), partETags);
s3Client.completeMultipartUpload(compRequest);
+ logger.info("Completing the upload for file {} in bucket {}", resource.getFile().getResourcePath(),
+ resource.getS3Storage().getBucketName());
}
}