You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airavata.apache.org by la...@apache.org on 2014/04/24 22:51:57 UTC
[01/11] creating gfac-bes and gfac-gram out from gfac-core
Repository: airavata
Updated Branches:
refs/heads/master 053ce56cf -> 7be9daea6
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHInputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHInputHandler.java
index 861f732..8da8253 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHInputHandler.java
@@ -35,7 +35,6 @@ import org.apache.airavata.commons.gfac.type.MappingFactory;
import org.apache.airavata.gfac.GFacException;
import org.apache.airavata.gfac.context.JobExecutionContext;
import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
import org.apache.airavata.gfac.context.security.SSHSecurityContext;
import org.apache.airavata.gfac.util.GFACSSHUtils;
import org.apache.airavata.gfac.utils.GFacUtils;
@@ -114,12 +113,7 @@ public class SSHInputHandler extends AbstractHandler {
}
private static String stageInputFiles(JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
- Cluster cluster = null;
- if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
- cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
- } else {
- cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
- }
+ Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
if (cluster == null) {
throw new GFacException("Security context is not set properly");
} else {
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHOutputHandler.java
index a95f463..42cebba 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/handler/SSHOutputHandler.java
@@ -36,7 +36,6 @@ import org.apache.airavata.commons.gfac.type.ActualParameter;
import org.apache.airavata.commons.gfac.type.ApplicationDescription;
import org.apache.airavata.gfac.GFacException;
import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
import org.apache.airavata.gfac.context.security.SSHSecurityContext;
import org.apache.airavata.gfac.provider.GFacProviderException;
import org.apache.airavata.gfac.util.GFACSSHUtils;
@@ -105,12 +104,7 @@ public class SSHOutputHandler extends AbstractHandler{
ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
.getApplicationDeploymentDescription().getType();
try {
- Cluster cluster = null;
- if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
- cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
- } else {
- cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
- }
+ Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
if (cluster == null) {
throw new GFacProviderException("Security context is not set properly");
} else {
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/provider/impl/SSHProvider.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/provider/impl/SSHProvider.java
index 6886b8e..da48ae5 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/provider/impl/SSHProvider.java
@@ -39,7 +39,6 @@ import org.apache.airavata.gfac.Constants;
import org.apache.airavata.gfac.GFacException;
import org.apache.airavata.gfac.context.JobExecutionContext;
import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
import org.apache.airavata.gfac.context.security.SSHSecurityContext;
import org.apache.airavata.gfac.handler.GFacHandlerException;
import org.apache.airavata.gfac.notification.events.StartExecutionEvent;
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index 83c6132..774060c 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -33,7 +33,9 @@
<module>gfac-core</module>
<module>gfac-ec2</module>
<module>gfac-ssh</module>
+ <module>gfac-gram</module>
<module>gfac-gsissh</module>
+ <module>gfac-bes</module>
</modules>
</profile>
</profiles>
[06/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
deleted file mode 100644
index 654c9ec..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
+++ /dev/null
@@ -1,568 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.provider.impl;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.math.BigInteger;
-import java.security.InvalidKeyException;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.PrivateKey;
-import java.security.cert.X509Certificate;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Random;
-import java.util.Set;
-
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.notification.events.StatusChangeEvent;
-import org.apache.airavata.gfac.notification.events.UnicoreJobIDEvent;
-import org.apache.airavata.gfac.provider.GFacProvider;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.gfac.provider.utils.DataTransferrer;
-import org.apache.airavata.gfac.provider.utils.JSDLGenerator;
-import org.apache.airavata.gfac.provider.utils.StorageCreator;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.registry.api.workflow.ApplicationJob;
-import org.apache.airavata.registry.api.workflow.ApplicationJob.ApplicationJobStatus;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.apache.xmlbeans.XmlCursor;
-import org.bouncycastle.asn1.ASN1InputStream;
-import org.bouncycastle.asn1.x500.X500Name;
-import org.bouncycastle.asn1.x500.style.BCStyle;
-import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
-import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
-import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration;
-import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration.Enum;
-import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStatusType;
-import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityDocument;
-import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityResponseDocument;
-import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesDocument;
-import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesResponseDocument;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3.x2005.x08.addressing.EndpointReferenceType;
-
-import de.fzj.unicore.bes.client.FactoryClient;
-import de.fzj.unicore.bes.faults.UnknownActivityIdentifierFault;
-import de.fzj.unicore.uas.client.StorageClient;
-import de.fzj.unicore.wsrflite.xmlbeans.WSUtilities;
-import eu.emi.security.authn.x509.helpers.CertificateHelpers;
-import eu.emi.security.authn.x509.helpers.proxy.X509v3CertificateBuilder;
-import eu.emi.security.authn.x509.impl.CertificateUtils;
-import eu.emi.security.authn.x509.impl.CertificateUtils.Encoding;
-import eu.emi.security.authn.x509.impl.DirectoryCertChainValidator;
-import eu.emi.security.authn.x509.impl.KeyAndCertCredential;
-import eu.emi.security.authn.x509.impl.X500NameUtils;
-import eu.unicore.util.httpclient.DefaultClientConfiguration;
-
-
-
-public class BESProvider extends AbstractProvider{
- protected final Logger log = LoggerFactory.getLogger(this.getClass());
-
- private DefaultClientConfiguration secProperties;
-
- private String jobId;
-
-
-
- public void initialize(JobExecutionContext jobExecutionContext)
- throws GFacProviderException, GFacException {
- log.info("Initializing UNICORE Provider");
- super.initialize(jobExecutionContext);
- initSecurityProperties(jobExecutionContext);
- log.debug("initialized security properties");
- }
-
-
- public void execute(JobExecutionContext jobExecutionContext)
- throws GFacProviderException {
- UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
- .getType();
-
- String factoryUrl = host.getUnicoreBESEndPointArray()[0];
-
- EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
- eprt.addNewAddress().setStringValue(factoryUrl);
-
- String userDN = getUserName(jobExecutionContext);
-
- if (userDN == null || userDN.equalsIgnoreCase("admin")) {
- userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
- }
-
- String xlogin = getCNFromUserDN(userDN);
- // create storage
- StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, xlogin);
-
- StorageClient sc = null;
- try {
- try {
- sc = storageCreator.createStorage();
- } catch (Exception e2) {
- log.error("Cannot create storage..");
- throw new GFacProviderException("Cannot create storage..", e2);
- }
-
- CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();
- JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory.newInstance();
-
- JobDefinitionType jobDefinition = jobDefDoc.addNewJobDefinition();
- try {
- jobDefinition = JSDLGenerator.buildJSDLInstance(jobExecutionContext, sc.getUrl()).getJobDefinition();
- cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
-
- log.info("JSDL" + jobDefDoc.toString());
- } catch (Exception e1) {
- throw new GFacProviderException("Cannot generate JSDL instance from the JobExecutionContext.", e1);
- }
-
- // upload files if any
- DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
- dt.uploadLocalFiles();
-
- FactoryClient factory = null;
- try {
- factory = new FactoryClient(eprt, secProperties);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(), e);
- }
-
- CreateActivityResponseDocument response = null;
- try {
- log.info(String.format("Activity Submitting to %s ... \n", factoryUrl));
- response = factory.createActivity(cad);
- log.info(String.format("Activity Submitted to %s \n", factoryUrl));
- } catch (Exception e) {
- throw new GFacProviderException("Cannot create activity.", e);
- }
- EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
-
- log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
-
- // factory.waitWhileActivityIsDone(activityEpr, 1000);
- jobId = WSUtilities.extractResourceID(activityEpr);
- if (jobId == null) {
- jobId = new Long(Calendar.getInstance().getTimeInMillis()).toString();
- }
- log.info("JobID: " + jobId);
- jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
- saveApplicationJob(jobExecutionContext, jobDefinition, activityEpr.toString());
-
- factory.getActivityStatus(activityEpr);
- log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(),
- factory.getActivityStatus(activityEpr).toString()));
-
- // TODO publish the status messages to the message bus
- while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
- && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
- && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
-
- ActivityStatusType activityStatus = null;
- try {
- activityStatus = getStatus(factory, activityEpr);
- JobState jobStatus = getApplicationJobStatus(activityStatus);
- String jobStatusMessage = "Status of job " + jobId + "is " + jobStatus;
- jobExecutionContext.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
- details.setJobID(jobId);
- GFacUtils.updateJobStatus(details, jobStatus);
- } catch (UnknownActivityIdentifierFault e) {
- throw new GFacProviderException(e.getMessage(), e.getCause());
- }catch (GFacException e) {
- throw new GFacProviderException(e.getMessage(), e.getCause());
- }
-
- try {
- Thread.sleep(5000);
- } catch (InterruptedException e) {
- }
- continue;
- }
-
- ActivityStatusType activityStatus = null;
- try {
- activityStatus = getStatus(factory, activityEpr);
- } catch (UnknownActivityIdentifierFault e) {
- throw new GFacProviderException(e.getMessage(), e.getCause());
- }
-
- log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState()
- .toString()));
-
- if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
- String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n"
- + activityStatus.getFault().getFaultstring() + "\n EXITCODE: " + activityStatus.getExitCode();
- log.info(error);
- try {
- Thread.sleep(5000);
- } catch (InterruptedException e) {
- }
- dt.downloadStdOuts();
- } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
- String experimentID = (String) jobExecutionContext.getProperty(Constants.PROP_TOPIC);
- JobState jobStatus = JobState.CANCELED;
- String jobStatusMessage = "Status of job " + jobId + "is " + jobStatus;
- jobExecutionContext.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
- details.setJobID(jobId);
- try {
- GFacUtils.saveJobStatus(details, jobStatus, jobExecutionContext.getTaskData().getTaskID());
- } catch (GFacException e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
- throw new GFacProviderException(experimentID + "Job Canceled");
- }
-
- else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
- try {
- Thread.sleep(5000);
- } catch (InterruptedException e) {
- }
- if (activityStatus.getExitCode() == 0) {
- dt.downloadRemoteFiles();
- } else {
- dt.downloadStdOuts();
- }
- }
-
- } catch (UnknownActivityIdentifierFault e1) {
- throw new GFacProviderException(e1.getLocalizedMessage(), e1);
- } finally {
- // destroy sms instance
- try {
- if (sc != null) {
- sc.destroy();
- }
- } catch (Exception e) {
- log.warn("Cannot destroy temporary SMS instance:" + sc.getUrl(), e);
- }
- }
- }
-
- private JobState getApplicationJobStatus(ActivityStatusType activityStatus){
- if (activityStatus == null) {
- return JobState.UNKNOWN;
- }
- Enum state = activityStatus.getState();
- String status = null;
- XmlCursor acursor = activityStatus.newCursor();
- try {
- if (acursor.toFirstChild()) {
- if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
- status = acursor.getName().getLocalPart();
- }
- }
- if (status != null) {
- if (status.equalsIgnoreCase("Queued") || status.equalsIgnoreCase("Starting")
- || status.equalsIgnoreCase("Ready")) {
- return JobState.QUEUED;
- } else if (status.equalsIgnoreCase("Staging-In")) {
- return JobState.SUBMITTED;
- } else if (status.equalsIgnoreCase("Staging-Out") || status.equalsIgnoreCase("FINISHED")) {
- return JobState.COMPLETE;
- } else if (status.equalsIgnoreCase("Executing")) {
- return JobState.ACTIVE;
- } else if (status.equalsIgnoreCase("FAILED")) {
- return JobState.FAILED;
- } else if (status.equalsIgnoreCase("CANCELLED")) {
- return JobState.CANCELED;
- }
- } else {
- if (ActivityStateEnumeration.CANCELLED.equals(state)) {
- return JobState.CANCELED;
- } else if (ActivityStateEnumeration.FAILED.equals(state)) {
- return JobState.FAILED;
- } else if (ActivityStateEnumeration.FINISHED.equals(state)) {
- return JobState.COMPLETE;
- } else if (ActivityStateEnumeration.RUNNING.equals(state)) {
- return JobState.ACTIVE;
- }
- }
- } finally {
- if (acursor != null)
- acursor.dispose();
- }
- return JobState.UNKNOWN;
- }
-
- private void saveApplicationJob(JobExecutionContext jobExecutionContext, JobDefinitionType jobDefinition,
- String metadata) {
- ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
- appJob.setJobId(jobId);
- appJob.setJobData(jobDefinition.toString());
- appJob.setSubmittedTime(Calendar.getInstance().getTime());
- appJob.setStatus(ApplicationJobStatus.SUBMITTED);
- appJob.setStatusUpdateTime(appJob.getSubmittedTime());
- appJob.setMetadata(metadata);
- GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
- }
-
- public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
- secProperties = null;
- }
-
- /**
- * EndpointReference need to be saved to make cancel work.
- *
- * @param activityEpr
- * @param jobExecutionContext
- * @throws GFacProviderException
- */
- public void cancelJob(String activityEpr, JobExecutionContext jobExecutionContext) throws GFacProviderException {
- try {
- initSecurityProperties(jobExecutionContext);
- EndpointReferenceType eprt = EndpointReferenceType.Factory.parse(activityEpr);
- UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
- .getType();
-
- String factoryUrl = host.getUnicoreBESEndPointArray()[0];
- EndpointReferenceType epr = EndpointReferenceType.Factory.newInstance();
- epr.addNewAddress().setStringValue(factoryUrl);
-
- FactoryClient factory = new FactoryClient(epr, secProperties);
- factory.terminateActivity(eprt);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
-
- }
-
- protected void downloadOffline(String smsEpr, JobExecutionContext jobExecutionContext) throws GFacProviderException {
- try {
- initSecurityProperties(jobExecutionContext);
- EndpointReferenceType eprt = EndpointReferenceType.Factory.parse(smsEpr);
- StorageClient sms = new StorageClient(eprt, secProperties);
- DataTransferrer dt = new DataTransferrer(jobExecutionContext, sms);
- // there must be output files there
- // this is also possible if client is re-connected, the jobs are
- // still
- // running and no output is produced
- dt.downloadRemoteFiles();
-
- // may be use the below method before downloading for checking
- // the number of entries
- // sms.listDirectory(".");
-
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(), e);
- }
- }
-
- protected void initSecurityProperties(JobExecutionContext jobExecutionContext) throws GFacProviderException,
- GFacException {
-
- if (secProperties != null)
- return;
-
- GSISecurityContext gssContext = (GSISecurityContext) jobExecutionContext
- .getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT);
-
- try {
- String certLocation = gssContext.getTrustedCertificatePath();
- List<String> trustedCert = new ArrayList<String>();
- trustedCert.add(certLocation + "/*.0");
- trustedCert.add(certLocation + "/*.pem");
-
- DirectoryCertChainValidator dcValidator = new DirectoryCertChainValidator(trustedCert, Encoding.PEM, -1,
- 60000, null);
-
- String userID = getUserName(jobExecutionContext);
-
- if ( userID == null || "".equals(userID) || userID.equalsIgnoreCase("admin") ) {
- userID = "CN=zdv575, O=Ultrascan Gateway, C=DE";
- }
-
- String userDN = userID.replaceAll("^\"|\"$", "");
-
- // TODO: should be changed to default airavata server locations
- KeyAndCertCredential cred = generateShortLivedCertificate(userDN, certLocation
- + "/cacert.pem", certLocation
- + "/cakey.pem", "ultrascan3");
- secProperties = new DefaultClientConfiguration(dcValidator, cred);
-
- // secProperties.doSSLAuthn();
- secProperties.getETDSettings().setExtendTrustDelegation(true);
-
- secProperties.setDoSignMessage(true);
-
- String[] outHandlers = secProperties.getOutHandlerClassNames();
-
- Set<String> outHandlerLst = null;
-
- // timeout in milliseconds
- Properties p = secProperties.getExtraSettings();
- p.setProperty("http.connection.timeout", "300000");
- p.setProperty("http.socket.timeout", "300000");
-
- if (outHandlers == null) {
- outHandlerLst = new HashSet<String>();
- } else {
- outHandlerLst = new HashSet<String>(Arrays.asList(outHandlers));
- }
-
- outHandlerLst.add("de.fzj.unicore.uas.security.ProxyCertOutHandler");
-
- secProperties.setOutHandlerClassNames(outHandlerLst.toArray(new String[outHandlerLst.size()]));
-
- } catch (Exception e) {
- throw new GFacProviderException(e.getMessage(), e);
- }
- }
-
- //FIXME: Get user details
- private String getUserName(JobExecutionContext context) {
-// if (context.getConfigurationData()!= null) {
-// return context.getConfigurationData().getBasicMetadata().getUserName();
-// } else {
- return "";
-// }
- }
-
- protected ActivityStatusType getStatus(FactoryClient fc, EndpointReferenceType activityEpr)
- throws UnknownActivityIdentifierFault {
-
- GetActivityStatusesDocument stats = GetActivityStatusesDocument.Factory.newInstance();
-
- stats.addNewGetActivityStatuses().setActivityIdentifierArray(new EndpointReferenceType[] { activityEpr });
-
- GetActivityStatusesResponseDocument resDoc = fc.getActivityStatuses(stats);
-
- ActivityStatusType activityStatus = resDoc.getGetActivityStatusesResponse().getResponseArray()[0]
- .getActivityStatus();
- return activityStatus;
- }
-
- protected String formatStatusMessage(String activityUrl, String status) {
- return String.format("Activity %s is %s.\n", activityUrl, status);
- }
-
- protected String subStatusAsString(ActivityStatusType statusType) {
-
- StringBuffer sb = new StringBuffer();
-
- sb.append(statusType.getState().toString());
-
- XmlCursor acursor = statusType.newCursor();
- if (acursor.toFirstChild()) {
- do {
- if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
- sb.append(":");
- sb.append(acursor.getName().getLocalPart());
- }
- } while (acursor.toNextSibling());
- acursor.dispose();
- return sb.toString();
- } else {
- acursor.dispose();
- return sb.toString();
- }
-
- }
-
- public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
-
- }
-
- protected KeyAndCertCredential generateShortLivedCertificate(String userDN, String caCertPath, String caKeyPath,
- String caPwd) throws Exception {
- final long CredentialGoodFromOffset = 1000L * 60L * 15L; // 15 minutes
- // ago
-
- final long startTime = System.currentTimeMillis() - CredentialGoodFromOffset;
- final long endTime = startTime + 30 * 3600 * 1000;
-
- String keyLengthProp = "1024";
- int keyLength = Integer.parseInt(keyLengthProp);
- String signatureAlgorithm = "SHA1withRSA";
-
- KeyAndCertCredential caCred = getCACredential(caCertPath, caKeyPath, caPwd);
-
- KeyPairGenerator kpg = KeyPairGenerator.getInstance(caCred.getKey().getAlgorithm());
- kpg.initialize(keyLength);
- KeyPair pair = kpg.generateKeyPair();
-
- X500Principal subjectDN = new X500Principal(userDN);
- Random rand = new Random();
-
- SubjectPublicKeyInfo publicKeyInfo;
- try {
- publicKeyInfo = SubjectPublicKeyInfo.getInstance(new ASN1InputStream(pair.getPublic().getEncoded())
- .readObject());
- } catch (IOException e) {
- throw new InvalidKeyException("Can not parse the public key"
- + "being included in the short lived certificate", e);
- }
-
- X500Name issuerX500Name = CertificateHelpers.toX500Name(caCred.getCertificate().getSubjectX500Principal());
-
- X500Name subjectX500Name = CertificateHelpers.toX500Name(subjectDN);
-
- X509v3CertificateBuilder certBuilder = new X509v3CertificateBuilder(issuerX500Name, new BigInteger(20, rand),
- new Date(startTime), new Date(endTime), subjectX500Name, publicKeyInfo);
-
- AlgorithmIdentifier sigAlgId = X509v3CertificateBuilder.extractAlgorithmId(caCred.getCertificate());
-
- X509Certificate certificate = certBuilder.build(caCred.getKey(), sigAlgId, signatureAlgorithm, null, null);
-
- certificate.checkValidity(new Date());
- certificate.verify(caCred.getCertificate().getPublicKey());
- KeyAndCertCredential result = new KeyAndCertCredential(pair.getPrivate(), new X509Certificate[] { certificate,
- caCred.getCertificate() });
-
- return result;
- }
-
- private KeyAndCertCredential getCACredential(String caCertPath, String caKeyPath, String password) throws Exception {
- InputStream isKey = new FileInputStream(caKeyPath);
- PrivateKey pk = CertificateUtils.loadPrivateKey(isKey, Encoding.PEM, password.toCharArray());
-
- InputStream isCert = new FileInputStream(caCertPath);
- X509Certificate caCert = CertificateUtils.loadCertificate(isCert, Encoding.PEM);
-
- if (isKey != null)
- isKey.close();
- if (isCert != null)
- isCert.close();
-
- return new KeyAndCertCredential(pk, new X509Certificate[] { caCert });
- }
-
- private String getCNFromUserDN(String userDN) {
- return X500NameUtils.getAttributeValues(userDN, BCStyle.CN)[0];
-
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
deleted file mode 100644
index 4066c00..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.provider.impl;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.util.Map;
-import java.util.MissingResourceException;
-import java.util.Properties;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.JobSubmissionFault;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.notification.events.JobIDEvent;
-import org.apache.airavata.gfac.notification.events.StartExecutionEvent;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gfac.utils.GramJobSubmissionListener;
-import org.apache.airavata.gfac.utils.GramProviderUtils;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.globus.gram.GramException;
-import org.globus.gram.GramJob;
-import org.globus.gram.WaitingForCommitException;
-import org.globus.gram.internal.GRAMConstants;
-import org.globus.gram.internal.GRAMProtocolErrorConstants;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramProvider extends AbstractProvider{
- private static final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
- private GramJob job;
- private GramJobSubmissionListener listener;
- private boolean twoPhase = true;
-
- /**
- * If normal job submission fail due to an authorisation failure or script failure we
- * will re-attempt to submit the job. In-order to avoid any recursive loop during a continuous
- * failure we track whether failure paths are tried or not. Following booleans keeps track whether
- * we already tried a failure path or not.
- */
- /**
- * To track job submissions during a authorisation failure while requesting job.
- */
- private boolean renewCredentialsAttempt = false;
- /**
- * To track job submission during a script error situation.
- */
- private boolean reSubmissionInProgress = false;
- /**
- * To track authorisation failures during status monitoring.
- */
- private boolean authorisationFailedAttempt = false;
-
- private static final Map<String, GramJob> currentlyExecutingJobCache
- = new ConcurrentHashMap<String, GramJob>();
-
- private static Properties resources;
-
- static {
- try {
-
- String propFileName = "errors.properties";
- resources = new Properties();
- InputStream inputStream = GramProvider.class.getClassLoader()
- .getResourceAsStream(propFileName);
-
- if (inputStream == null) {
- throw new FileNotFoundException("property file '" + propFileName
- + "' not found in the classpath");
- }
-
- resources.load(inputStream);
-
- } catch (FileNotFoundException mre) {
- log.error("errors.properties not found", mre);
- } catch (IOException e) {
- log.error("Error reading errors.properties file", e);
- }
- }
-
-
- // This method prepare the environment before the application invocation.
- public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
-
- try {
- super.initialize(jobExecutionContext);
- String strTwoPhase = ServerSettings.getSetting("TwoPhase");
- if (strTwoPhase != null) {
- twoPhase = Boolean.parseBoolean(strTwoPhase);
- log.info("Two phase commit is set to " + twoPhase);
- }
- } catch (ApplicationSettingsException e) {
- log.warn("Error reading TwoPhase property from configurations.", e);
- }
-
- job = GramProviderUtils.setupEnvironment(jobExecutionContext, twoPhase);
- listener = new GramJobSubmissionListener(job, jobExecutionContext);
- job.addListener(listener);
- }
-
- public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException{
- jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
- GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().
- getHostDescription().getType();
- ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
- getApplicationDeploymentDescription().getType();
-
- StringBuilder stringBuilder = new StringBuilder();
- try {
-
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
- getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
- job.setCredentials(gssCred);
- // We do not support multiple gatekeepers in XBaya GUI, so we simply pick the 0th element in the array
- String gateKeeper = host.getGlobusGateKeeperEndPointArray(0);
- log.info("Request to contact:" + gateKeeper);
-
- stringBuilder.append("Finished launching job, Host = ").append(host.getHostAddress()).append(" RSL = ")
- .append(job.getRSL()).append(" working directory = ").append(app.getStaticWorkingDirectory())
- .append(" temp directory = ").append(app.getScratchWorkingDirectory())
- .append(" Globus GateKeeper Endpoint = ").append(gateKeeper);
-
- log.info(stringBuilder.toString());
-
- submitJobs(gateKeeper, jobExecutionContext, host);
-
- } catch (ApplicationSettingsException e) {
- throw new GFacException(e.getMessage(), e);
- } finally {
- if (job != null) {
- try {
- /*
- * Remove listener
- */
- job.removeListener(listener);
- } catch (Exception e) {
- log.error(e.getMessage());
- }
- }
- }
- }
-
- private void submitJobs(String gateKeeper,
- JobExecutionContext jobExecutionContext,
- GlobusHostType globusHostType) throws GFacException, GFacProviderException {
- boolean applicationSaved=false;
- String taskID = jobExecutionContext.getTaskData().getTaskID();
-
- if (twoPhase) {
- try {
- /*
- * The first boolean is to force communication through SSLv3
- * The second boolean is to specify the job is a batch job - use true for interactive and false for
- * batch.
- * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
- */
- job.request(true, gateKeeper, false, false);
-
- // Single boolean to track all authentication failures, therefore we need to re-initialize
- // this here
- renewCredentialsAttempt = false;
-
- } catch (WaitingForCommitException e) {
- String jobID = job.getIDAsString();
-
- details.setJobID(jobID);
- details.setJobDescription(job.getRSL());
- jobExecutionContext.setJobDetails(details);
- GFacUtils.saveJobStatus(details, JobState.UN_SUBMITTED, taskID);
-
- applicationSaved=true;
- String jobStatusMessage = "Un-submitted JobID= " + jobID;
- log.info(jobStatusMessage);
- jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
- log.info("Two phase commit: sending COMMIT_REQUEST signal; Job id - " + jobID);
-
- try {
- job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
-
- } catch (GramException gramException) {
- throw new GFacException("Error while sending commit request. Job Id - "
- + job.getIDAsString(), gramException);
- } catch (GSSException gssException) {
-
- // User credentials are invalid
- log.error("Error while submitting commit request - Credentials provided are invalid. Job Id - "
- + job.getIDAsString(), e);
- log.info("Attempting to renew credentials and re-submit commit signal...");
- GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
- renewCredentials(jobExecutionContext);
-
- try {
- job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
- } catch (GramException e1) {
- GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
- throw new GFacException("Error while sending commit request. Job Id - "
- + job.getIDAsString(), e1);
- } catch (GSSException e1) {
- GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
- throw new GFacException("Error while sending commit request. Job Id - "
- + job.getIDAsString() + ". Credentials provided invalid", e1);
- }
- }
- GFacUtils.updateJobStatus(details, JobState.SUBMITTED);
- jobStatusMessage = "Submitted JobID= " + job.getIDAsString();
- log.info(jobStatusMessage);
- jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
- } catch (GSSException e) {
- // Renew credentials and re-submit
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
-
- reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
-
- } catch (GramException e) {
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
-
- throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
- }
- } else {
-
- /*
- * The first boolean is to force communication through SSLv3
- * The second boolean is to specify the job is a batch job - use true for interactive and false for
- * batch.
- * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
- */
- try {
-
- job.request(true, gateKeeper, false, false);
- renewCredentialsAttempt = false;
-
- } catch (GramException e) {
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
- throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
- } catch (GSSException e) {
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
- // Renew credentials and re-submit
- reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
- }
-
- String jobStatusMessage = "Un-submitted JobID= " + job.getIDAsString();
- log.info(jobStatusMessage);
- jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
-
- }
-
- currentlyExecutingJobCache.put(job.getIDAsString(), job);
- /*
- * Wait until job is done
- */
- listener.waitFor();
-
- checkJobStatus(jobExecutionContext, globusHostType, gateKeeper);
-
- }
-
- private void renewCredentials(JobExecutionContext jobExecutionContext) throws GFacException {
-
- renewCredentials(this.job, jobExecutionContext);
- }
-
- private void renewCredentials(GramJob gramJob, JobExecutionContext jobExecutionContext) throws GFacException {
-
- try {
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
- getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
- gramJob.renew(gssCred);
- } catch (GramException e1) {
- throw new GFacException("Unable to renew credentials. Job Id - "
- + gramJob.getIDAsString(), e1);
- } catch (GSSException e1) {
- throw new GFacException("Unable to renew credentials. Job Id - "
- + gramJob.getIDAsString(), e1);
- } catch (ApplicationSettingsException e) {
- throw new GFacException(e.getLocalizedMessage(), e);
- }
- }
-
- private void reSubmitJob(String gateKeeper,
- JobExecutionContext jobExecutionContext,
- GlobusHostType globusHostType, Exception e) throws GFacException, GFacProviderException {
-
- if (!renewCredentialsAttempt) {
-
- renewCredentialsAttempt = true;
-
- // User credentials are invalid
- log.error("Error while submitting job - Credentials provided are invalid. Job Id - "
- + job.getIDAsString(), e);
- log.info("Attempting to renew credentials and re-submit jobs...");
-
- // Remove existing listener and register a new listener
- job.removeListener(listener);
- listener = new GramJobSubmissionListener(job, jobExecutionContext);
-
- job.addListener(listener);
-
- renewCredentials(jobExecutionContext);
-
- submitJobs(gateKeeper, jobExecutionContext, globusHostType);
-
- } else {
- throw new GFacException("Error while submitting job - Credentials provided are invalid. Job Id - "
- + job.getIDAsString(), e);
- }
-
- }
-
- private void reSubmitJob(String gateKeeper,
- JobExecutionContext jobExecutionContext,
- GlobusHostType globusHostType) throws GFacException, GFacProviderException {
-
- // User credentials are invalid
- log.info("Attempting to renew credentials and re-submit jobs...");
-
- // Remove existing listener and register a new listener
- job.removeListener(listener);
- listener = new GramJobSubmissionListener(job, jobExecutionContext);
-
- job.addListener(listener);
-
- renewCredentials(jobExecutionContext);
-
- submitJobs(gateKeeper, jobExecutionContext, globusHostType);
-
- }
-
-
-
- public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
- }
-
- @Override
- public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
- cancelSingleJob(jobId, jobExecutionContext);
- }
-
-
- private void cancelSingleJob(String jobId, JobExecutionContext context) throws GFacException {
- // First check whether job id is in the cache
- if (currentlyExecutingJobCache.containsKey(jobId)) {
-
- synchronized (this) {
- GramJob gramJob = currentlyExecutingJobCache.get(jobId);
-
- // Even though we check using containsKey, at this point job could be null
- if (gramJob != null && (gramJob.getStatus() != GRAMConstants.STATUS_DONE ||
- gramJob.getStatus() != GRAMConstants.STATUS_FAILED)) {
- cancelJob(gramJob, context);
- }
- }
-
- } else {
-
- try {
- GSSCredential gssCred = ((GSISecurityContext)context.
- getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
- GramJob gramJob = new GramJob(null);
- try {
- gramJob.setID(jobId);
- } catch (MalformedURLException e) {
- throw new GFacException("Invalid job id - " + jobId, e);
- }
- gramJob.setCredentials(gssCred);
-
- synchronized (this) {
- if (gramJob.getStatus() != GRAMConstants.STATUS_DONE ||
- gramJob.getStatus() != GRAMConstants.STATUS_FAILED) {
- cancelJob(gramJob, context);
- }
- }
- } catch (ApplicationSettingsException e) {
- throw new GFacException(e);
- }
- }
- }
-
- private void cancelJob(GramJob gramJob, JobExecutionContext context) throws GFacException{
-
- try {
- gramJob.cancel();
- } catch (GramException e) {
- throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e);
- } catch (GSSException e) {
-
- log.warn("Credentials invalid to cancel job. Attempting to renew credentials and re-try. " +
- "Job id - " + gramJob.getIDAsString());
- renewCredentials(gramJob, context);
-
- try {
- gramJob.cancel();
- gramJob.signal(GramJob.SIGNAL_COMMIT_END);
- } catch (GramException e1) {
- throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e1);
- } catch (GSSException e1) {
- throw new GFacException("Error cancelling job, invalid credentials. Job id - "
- + gramJob.getIDAsString(), e);
- }
- }
-
- }
-
- public void initProperties(Map<String, String> properties) throws GFacException {
-
- }
-
- private void checkJobStatus(JobExecutionContext jobExecutionContext, GlobusHostType host, String gateKeeper)
- throws GFacProviderException {
- int jobStatus = listener.getCurrentStatus();
-
- if (jobStatus == GramJob.STATUS_FAILED) {
-
- String errorMsg = "Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " Job Exit Code = "
- + listener.getError() + " Error Description = " + getGramErrorString(listener.getError());
-
- if (listener.getError() == GRAMProtocolErrorConstants.INVALID_SCRIPT_REPLY) {
-
- // re-submitting without renewing
- // TODO verify why we re-submit jobs when we get a invalid script reply
- if (!reSubmissionInProgress) {
- reSubmissionInProgress = true;
-
- log.info("Invalid script reply received. Re-submitting job, id - " + job.getIDAsString());
- try {
- reSubmitJob(gateKeeper, jobExecutionContext, host);
- } catch (GFacException e) {
- throw new GFacProviderException
- ("Error during re-submission. Original job submission data - " + errorMsg, e);
- }
- return;
- }
-
- } else if (listener.getError() == GRAMProtocolErrorConstants.ERROR_AUTHORIZATION) {
-
- // re-submit with renewed credentials
- if (!authorisationFailedAttempt) {
- authorisationFailedAttempt = true;
- log.info("Authorisation error contacting provider. Re-submitting job with renewed credentials.");
-
- try {
- renewCredentials(jobExecutionContext);
- reSubmitJob(gateKeeper, jobExecutionContext, host);
- } catch (GFacException e) {
- throw new GFacProviderException
- ("Error during re-submission. Original job submission data - " + errorMsg, e);
- }
-
- return;
- }
-
- } else if (listener.getError() == GRAMProtocolErrorConstants.USER_CANCELLED) {
-
- log.info("User successfully cancelled job id " + job.getIDAsString());
- return;
- }
-
-
-
- log.error(errorMsg);
-
- synchronized (this) {
- currentlyExecutingJobCache.remove(job.getIDAsString());
- }
-
- throw new JobSubmissionFault(new Exception(errorMsg), host.getHostAddress(), gateKeeper,
- job.getRSL(), jobExecutionContext, getGramErrorString(listener.getError()),
- listener.getError());
-
- } else if (jobStatus == GramJob.STATUS_DONE) {
- log.info("Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " is successfully executed.");
-
- synchronized (this) {
- currentlyExecutingJobCache.remove(job.getIDAsString());
- }
- }
- }
-
- public String getGramErrorString(int errorCode) {
-
- if (resources != null) {
- try {
- return resources.getProperty(String.valueOf(errorCode));
- } catch (MissingResourceException mre) {
- log.warn("Error reading globus error descriptions.", mre);
- return "Error code: " + errorCode;
- }
- } else {
- return "Error code: " + errorCode;
- }
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ApplicationProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ApplicationProcessor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ApplicationProcessor.java
deleted file mode 100644
index d88cddf..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ApplicationProcessor.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.schemas.gfac.ExtendedKeyValueType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.FileNameType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.UserNameType;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ProcessesPerHostType;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ThreadsPerProcessType;
-
-import java.io.File;
-
-
-public class ApplicationProcessor {
-
- public static void generateJobSpecificAppElements(JobDefinitionType value, JobExecutionContext context){
-
- String userName = getUserNameFromContext(context);
- if (userName.equalsIgnoreCase("admin")){
- userName = "CN=zdv575, O=Ultrascan Gateway, C=DE";
- }
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- createGenericApplication(value, appDepType);
-
- if (appDepType.getApplicationEnvironmentArray().length > 0) {
- createApplicationEnvironment(value,
- appDepType.getApplicationEnvironmentArray(), appDepType);
- }
-
-
- if (appDepType.getExecutableLocation() != null) {
- FileNameType fNameType = FileNameType.Factory.newInstance();
- fNameType.setStringValue(appDepType.getExecutableLocation());
- if(isParallelJob(appDepType)) {
- JSDLUtils.getOrCreateSPMDApplication(value).setExecutable(fNameType);
- JSDLUtils.getSPMDApplication(value).setSPMDVariation(getSPMDVariation(appDepType));
-
- if(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES)!=null){
- NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
- num.setStringValue(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES));
- JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
- }
-
- if(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST)!=null){
- ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
- pph.setStringValue(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST));
- JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
- }
-
- if(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST)!=null){
- ThreadsPerProcessType tpp = ThreadsPerProcessType.Factory.newInstance();
- tpp.setStringValue(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST));
- JSDLUtils.getSPMDApplication(value).setThreadsPerProcess(tpp);
-
- }
-
- if(userName != null) {
- UserNameType userNameType = UserNameType.Factory.newInstance();
- userNameType.setStringValue(userName);
- JSDLUtils.getSPMDApplication(value).setUserName(userNameType);
- }
- }
- else {
- JSDLUtils.getOrCreatePOSIXApplication(value).setExecutable(fNameType);
- if(userName != null) {
- UserNameType userNameType = UserNameType.Factory.newInstance();
- userNameType.setStringValue(userName);
- JSDLUtils.getOrCreatePOSIXApplication(value).setUserName(userNameType);
- }
- }
- }
-
-
- String stdout = (appDepType.getStandardOutput() != null) ? new File(appDepType.getStandardOutput()).getName(): "stdout";
- ApplicationProcessor.setApplicationStdOut(value, appDepType, stdout);
-
-
- String stderr = (appDepType.getStandardError() != null) ? new File(appDepType.getStandardError()).getName() : "stderr";
- ApplicationProcessor.setApplicationStdErr(value, appDepType, stderr);
-
- }
-
- public static String getUserNameFromContext(JobExecutionContext jobContext) {
- if(jobContext.getTaskData() == null)
- return null;
- //FIXME: Discuss to get user and change this
- return "admin";
- }
- public static boolean isParallelJob(HpcApplicationDeploymentType appDepType) {
-
- boolean isParallel = false;
-
- if (appDepType.getJobType() != null) {
- // TODO set data output directory
- int status = appDepType.getJobType().intValue();
-
- switch (status) {
- // TODO: this check should be done outside this class
- case JobTypeType.INT_MPI:
- case JobTypeType.INT_OPEN_MP:
- isParallel = true;
- break;
-
- case JobTypeType.INT_SERIAL:
- case JobTypeType.INT_SINGLE:
- isParallel = false;
- break;
-
- default:
- isParallel = false;
- break;
- }
- }
- return isParallel;
- }
-
-
- public static void createApplicationEnvironment(JobDefinitionType value, NameValuePairType[] nameValuePairs, HpcApplicationDeploymentType appDepType) {
-
- if(isParallelJob(appDepType)) {
- for (NameValuePairType nv : nameValuePairs) {
- EnvironmentType envType = JSDLUtils.getOrCreateSPMDApplication(value).addNewEnvironment();
- envType.setName(nv.getName());
- envType.setStringValue(nv.getValue());
- }
- }
- else {
- for (NameValuePairType nv : nameValuePairs) {
- EnvironmentType envType = JSDLUtils.getOrCreatePOSIXApplication(value).addNewEnvironment();
- envType.setName(nv.getName());
- envType.setStringValue(nv.getValue());
- }
- }
-
- }
-
-
- public static String getSPMDVariation (HpcApplicationDeploymentType appDepType) {
-
- String variation = null;
-
- if (appDepType.getJobType() != null) {
- // TODO set data output directory
- int status = appDepType.getJobType().intValue();
-
- switch (status) {
- // TODO: this check should be done outside this class
- case JobTypeType.INT_MPI:
- variation = SPMDVariations.MPI.value();
- break;
-
- case JobTypeType.INT_OPEN_MP:
- variation = SPMDVariations.OpenMPI.value();
- break;
-
- }
- }
- return variation;
- }
-
-
- public static void addApplicationArgument(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stringPrm) {
- if(isParallelJob(appDepType))
- JSDLUtils.getOrCreateSPMDApplication(value)
- .addNewArgument().setStringValue(stringPrm);
- else
- JSDLUtils.getOrCreatePOSIXApplication(value)
- .addNewArgument().setStringValue(stringPrm);
-
- }
-
- public static void setApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
- FileNameType fName = FileNameType.Factory.newInstance();
- fName.setStringValue(stderr);
- if (isParallelJob(appDepType))
- JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
- else
- JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
- }
-
- public static void setApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
- FileNameType fName = FileNameType.Factory.newInstance();
- fName.setStringValue(stderr);
- if (isParallelJob(appDepType))
- JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
- else
- JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
- }
-
- public static String getApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
- if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getOutput().getStringValue();
- else return JSDLUtils.getOrCreatePOSIXApplication(value).getOutput().getStringValue();
- }
-
- public static String getApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
- if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getError().getStringValue();
- else return JSDLUtils.getOrCreatePOSIXApplication(value).getError().getStringValue();
- }
-
- public static void createGenericApplication(JobDefinitionType value, HpcApplicationDeploymentType appDepType) {
- if (appDepType.getApplicationName() != null) {
- ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
- String appName = appDepType.getApplicationName()
- .getStringValue();
- appType.setApplicationName(appName);
- JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
- }
- }
-
-
- public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
- ExtendedKeyValueType[] extended = appDepType.getKeyValuePairsArray();
- for(ExtendedKeyValueType e: extended) {
- if(e.getName().equalsIgnoreCase(name)) {
- return e.getStringValue();
- }
- }
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataStagingProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataStagingProcessor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataStagingProcessor.java
deleted file mode 100644
index 80112df..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataStagingProcessor.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.File;
-import java.net.URI;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-
-public class DataStagingProcessor {
-
- public static void generateDataStagingElements(JobDefinitionType value, JobExecutionContext context) throws Exception{
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
-
- String gridftpEndpoint = ((UnicoreHostType) context.getApplicationContext().getHostDescription().getType())
- .getGridFTPEndPointArray()[0];
-
-
- if (context.getInMessageContext().getParameters().size() > 0) {
- buildDataStagingFromInputContext(context, value, gridftpEndpoint, appDepType);
- }
-
- if (context.getOutMessageContext().getParameters().size() > 0) {
- buildFromOutputContext(context, value, gridftpEndpoint, appDepType);
- }
-
- createStdOutURIs(value, appDepType, gridftpEndpoint, isUnicoreEndpoint(context));
-
- }
-
- private static void createInURIElement(JobDefinitionType value,
- String endpoint, String inputDir, ActualParameter inParam)
- throws Exception {
-
- String uri = ((URIParameterType) inParam.getType()).getValue();
- String fileName = new File(uri).getName();
- if (uri.startsWith("file")) {
- URI gridFTPInputDirURI = GFacUtils.createGsiftpURI(endpoint,
- inputDir);
- String filePath = gridFTPInputDirURI.toString() + File.separator
- + fileName;
- JSDLUtils
- .addDataStagingSourceElement(value, filePath, null, fileName);
- } else if (uri.startsWith("gsiftp") || uri.startsWith("http")
- || uri.startsWith("rns")) {
- // no need to stage-in those files to the input
- // directory
- JSDLUtils.addDataStagingSourceElement(value, uri, null, fileName);
- }
-
- }
-
- private static void createStdOutURIs(JobDefinitionType value,
- HpcApplicationDeploymentType appDepType, String endpoint,
- boolean isUnicore) throws Exception {
-
- URI remoteOutputDir = GFacUtils.createGsiftpURI(endpoint,
- appDepType.getOutputDataDirectory());
-
- String stdout = ApplicationProcessor.getApplicationStdOut(value, appDepType);
-
- String stderr = ApplicationProcessor.getApplicationStdErr(value, appDepType);
-
- String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
- : stdout;
- String stdoutURI = GFacUtils.createGsiftpURIAsString(
- remoteOutputDir.toString(), stdoutFileName);
- JSDLUtils.addDataStagingTargetElement(value, null, stdoutFileName,
- stdoutURI);
-
- String stderrFileName = (stdout == null || stderr.equals("")) ? "stderr"
- : stderr;
- String stderrURI = GFacUtils.createGsiftpURIAsString(
- remoteOutputDir.toString(), stderrFileName);
- JSDLUtils.addDataStagingTargetElement(value, null, stderrFileName,
- stderrURI);
-
- if(isUnicore) {
- String scriptExitCodeFName = "UNICORE_SCRIPT_EXIT_CODE";
- String scriptExitCode = GFacUtils.createGsiftpURIAsString(
- remoteOutputDir.toString(), scriptExitCodeFName);
- JSDLUtils.addDataStagingTargetElement(value, null,
- scriptExitCodeFName, scriptExitCode.toString());
- }
-
- }
-
-
- private static void createOutStringElements(JobDefinitionType value,
- HpcApplicationDeploymentType appDeptype, String endpoint, String prmValue) throws Exception {
-
- if(prmValue == null || "".equals(prmValue)) return;
-
-
- String outputUri = GFacUtils.createGsiftpURIAsString(endpoint, appDeptype.getOutputDataDirectory());
-
- URI finalOutputUri = GFacUtils.createGsiftpURI(outputUri, prmValue);
- JSDLUtils.addDataStagingTargetElement(value, null, prmValue, finalOutputUri.toString());
- }
-
-
- private static void createOutURIElement(JobDefinitionType value,
- String prmValue) throws Exception {
- String fileName = new File(prmValue.toString()).getName();
- JSDLUtils.addDataStagingTargetElement(value, null, fileName, prmValue);
- }
-
-
- private static JobDefinitionType buildFromOutputContext(JobExecutionContext context,
- JobDefinitionType value, String gridftpEndpoint,
- HpcApplicationDeploymentType appDepType) throws Exception {
-
- Map<String, Object> outputParams = context.getOutMessageContext()
- .getParameters();
-
- for (String paramKey : outputParams.keySet()) {
-
- ActualParameter outParam = (ActualParameter) outputParams
- .get(paramKey);
-
- // if single urls then convert each url into jsdl source
- // elements,
- // that are formed by concat of gridftpurl+inputdir+filename
-
- String paramDataType = outParam.getType().getType().toString();
-
- if ("URI".equals(paramDataType)) {
- String uriPrm = ((URIParameterType) outParam.getType())
- .getValue();
- createOutURIElement(value, uriPrm);
- }
-
- // string params are converted into the job arguments
-
- else if (("URIArray").equals(paramDataType)) {
- String[] uriArray = ((URIArrayType) outParam.getType())
- .getValueArray();
- for (String u : uriArray) {
-
- createOutURIElement(value, u);
- }
-
- }
- else if ("String".equals(paramDataType)) {
- String stringPrm = ((StringParameterType) outParam
- .getType()).getValue();
- createOutStringElements(value, appDepType, gridftpEndpoint, stringPrm);
- }
-
- else if ("StringArray".equals(paramDataType)) {
- String[] valueArray = ((StringArrayType) outParam.getType())
- .getValueArray();
- for (String v : valueArray) {
- createOutStringElements(value, appDepType, gridftpEndpoint, v);
- }
- }
- }
-
- return value;
- }
-
-
- private static void buildDataStagingFromInputContext(JobExecutionContext context, JobDefinitionType value, String gridftpEndpoint, HpcApplicationDeploymentType appDepType)
- throws Exception {
-
- // TODO set data directory
- Map<String, Object> inputParams = context.getInMessageContext()
- .getParameters();
-
- for (String paramKey : inputParams.keySet()) {
-
- ActualParameter inParam = (ActualParameter) inputParams
- .get(paramKey);
-
- // if single urls then convert each url into jsdl source
- // elements,
- // that are formed by concat of gridftpurl+inputdir+filename
-
- String paramDataType = inParam.getType().getType().toString();
-
- if ("URI".equals(paramDataType)) {
- createInURIElement(value, gridftpEndpoint,
- appDepType.getInputDataDirectory(), inParam);
- }
-
- // string params are converted into the job arguments
-
- else if ("String".equals(paramDataType)) {
- String stringPrm = ((StringParameterType) inParam.getType())
- .getValue();
- ApplicationProcessor.addApplicationArgument(value, appDepType, stringPrm);
- }
- }
-
- }
-
-
- public static boolean isUnicoreEndpoint(JobExecutionContext context) {
- return ( (context.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType)?true:false );
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataTransferrer.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataTransferrer.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataTransferrer.java
deleted file mode 100644
index ff3f9e2..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/DataTransferrer.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import de.fzj.unicore.uas.client.StorageClient;
-
-
-public class DataTransferrer {
- protected final Logger log = LoggerFactory.getLogger(this.getClass());
-
- private JobExecutionContext jobContext;
-
- private StorageClient storageClient;
-
- public DataTransferrer(JobExecutionContext jobContext, StorageClient storageClient) {
- this.jobContext = jobContext;
- this.storageClient = storageClient;
- }
-
-
- public void uploadLocalFiles() throws GFacProviderException {
- Map<String, Object> inputParams = jobContext.getInMessageContext()
- .getParameters();
- for (String paramKey : inputParams.keySet()) {
- ActualParameter inParam = (ActualParameter) inputParams
- .get(paramKey);
- String paramDataType = inParam.getType().getType().toString();
- if("URI".equals(paramDataType)) {
- String uri = ((URIParameterType) inParam.getType()).getValue();
- String fileName = new File(uri).getName();
- if (uri.startsWith("file")) {
- try {
- String uriWithoutProtocol = uri.substring(
- uri.lastIndexOf("://") + 1, uri.length());
- FileUploader fileUploader = new FileUploader(
- uriWithoutProtocol, "input/" + fileName,
- Mode.overwrite);
- fileUploader.perform(storageClient);
- } catch (FileNotFoundException e3) {
- throw new GFacProviderException(
- "Error while staging-in, local file "+fileName+" not found", e3);
- } catch (Exception e) {
- throw new GFacProviderException("Cannot upload files", e);
-
- }
-
- }
- }
- }
-
- }
-
- /**
- * This method will download all the remote files specified according to the output
- * context of a job.
- * */
- public void downloadRemoteFiles() throws GFacProviderException {
-
- String downloadLocation = getDownloadLocation();
-
- File file = new File(downloadLocation);
- if(!file.exists()){
- file.mkdirs();
- }
-
- Map<String, ActualParameter> stringMap = new HashMap<String, ActualParameter>();
-
- Map<String, Object> outputParams = jobContext.getOutMessageContext()
- .getParameters();
-
- for (String paramKey : outputParams.keySet()) {
-
- ActualParameter outParam = (ActualParameter) outputParams
- .get(paramKey);
-
- // if single urls then convert each url into jsdl source
- // elements,
- // that are formed by concat of gridftpurl+inputdir+filename
-
- String paramDataType = outParam.getType().getType().toString();
-
- if ("String".equals(paramDataType)) {
- String stringPrm = ((StringParameterType) outParam
- .getType()).getValue();
- String localFileName = null;
- //TODO: why analysis.tar? it wont scale to gateways..
- if(stringPrm == null || stringPrm.isEmpty()){
- localFileName = "analysis-results.tar";
- }else{
- localFileName = stringPrm.substring(stringPrm.lastIndexOf("/")+1);
- }
- String outputLocation = downloadLocation+File.separator+localFileName;
- FileDownloader fileDownloader = new FileDownloader("output/"+stringPrm,outputLocation, Mode.overwrite);
- try {
- fileDownloader.perform(storageClient);
- ((StringParameterType) outParam.getType()).setValue(outputLocation);
- stringMap.put(paramKey, outParam);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
- }
-
- else if ("StringArray".equals(paramDataType)) {
- String[] valueArray = ((StringArrayType) outParam.getType())
- .getValueArray();
- for (String v : valueArray) {
- String localFileName = v.substring(v.lastIndexOf("/")+1);;
- String outputLocation = downloadLocation+File.separator+localFileName;
- FileDownloader fileDownloader = new FileDownloader("output/"+v,outputLocation, Mode.overwrite);
- try {
- fileDownloader.perform(storageClient);
- ((StringParameterType) outParam.getType()).setValue(outputLocation);
- stringMap.put(paramKey, outParam);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
- }
- }
- }
- if (stringMap == null || stringMap.isEmpty()) {
- throw new GFacProviderException("Empty Output returned from the Application, Double check the application" +
- "and ApplicationDescriptor output Parameter Names");
- }
-
- downloadStdOuts();
- }
-
-
- public void downloadStdOuts() throws GFacProviderException{
- String downloadLocation = getDownloadLocation();
- File file = new File(downloadLocation);
- if(!file.exists()){
- file.mkdirs();
- }
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) jobContext
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- String stdout = appDepType.getStandardOutput();
- String stderr = appDepType.getStandardError();
- if(stdout != null) {
- stdout = stdout.substring(stdout.lastIndexOf('/')+1);
- }
-
- if(stderr != null) {
- stderr = stderr.substring(stderr.lastIndexOf('/')+1);
- }
-
- String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
- : stdout;
- String stderrFileName = (stdout == null || stderr.equals("")) ? "stderr"
- : stderr;
-
- ApplicationDescription application = jobContext.getApplicationContext().getApplicationDeploymentDescription();
- ApplicationDeploymentDescriptionType appDesc = application.getType();
-
- String stdoutLocation = downloadLocation+File.separator+stdoutFileName;
- FileDownloader f1 = new FileDownloader("output/"+stdoutFileName,stdoutLocation, Mode.overwrite);
- try {
- f1.perform(storageClient);
- String stdoutput = readFile(stdoutLocation);
- appDesc.setStandardOutput(stdoutput);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
- String stderrLocation = downloadLocation+File.separator+stderrFileName;
- FileDownloader f2 = new FileDownloader("output/"+stderrFileName,stderrLocation, Mode.overwrite);
- try {
- f2.perform(storageClient);
- String stderror = readFile(stderrLocation);
- appDesc.setStandardError(stderror);
- } catch (Exception e) {
- throw new GFacProviderException(e.getLocalizedMessage(),e);
- }
- }
-
- private String readFile(String localFile) throws IOException {
- BufferedReader instream = new BufferedReader(new FileReader(localFile));
- StringBuffer buff = new StringBuffer();
- String temp = null;
- while ((temp = instream.readLine()) != null) {
- buff.append(temp);
- buff.append(Constants.NEWLINE);
- }
-
- log.info("finish read file:" + localFile);
-
- return buff.toString();
- }
-
- private String getDownloadLocation() {
- TaskDetails taskData = jobContext.getTaskData();
- if (taskData != null && taskData.getAdvancedOutputDataHandling() != null) {
- String outputDataDirectory = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
- return outputDataDirectory;
- }
- return null;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileDownloader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileDownloader.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileDownloader.java
deleted file mode 100644
index 1bbf2b7..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileDownloader.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Map;
-
-import org.unigrids.services.atomic.types.GridFileType;
-import org.unigrids.services.atomic.types.ProtocolType;
-
-import de.fzj.unicore.uas.client.FileTransferClient;
-import de.fzj.unicore.uas.client.StorageClient;
-import de.fzj.unicore.uas.client.UFTPConstants;
-import de.fzj.unicore.uas.client.UFTPFileTransferClient;
-import de.fzj.unicore.uas.fts.FiletransferOptions.IMonitorable;
-import de.fzj.unicore.uas.fts.FiletransferOptions.SupportsPartialRead;
-
-/**
- * helper that exports remote files from a UNICORE Storage
- * to the local client machine.<br/>
- * Simple wildcards ("*" and "?") and download of
- * directories are supported.
- *
- * TODO this should be refactored so the single-file download logic
- * is separated from the wildcard/directory/provided outputStream logic
- *
- * @author schuller
- */
-public class FileDownloader extends FileTransferBase{
-
- private boolean showProgress=true;
-
- private boolean forceFileOnly=false;
-
- private OutputStream targetStream=null;
-
- public FileDownloader(String from, String to, Mode mode){
- this(from,to,mode,true);
- }
-
- public FileDownloader(String from, String to, Mode mode, boolean failOnError){
- this.to=to;
- this.from=from;
- this.mode=mode;
- this.failOnError=failOnError;
- }
-
- public void perform(StorageClient sms)throws Exception{
- boolean isWildcard=hasWildCards(from);
- boolean isDirectory=false;
- GridFileType gridSource=null;
- if(isWildcard){
- performWildCardExport(sms);
- }
- else {
- //check if source is a directory
- gridSource=sms.listProperties(from);
- isDirectory=gridSource.getIsDirectory();
- if(isDirectory){
- if(forceFileOnly){
- throw new IOException("Source is a directory");
- }
- performDirectoryExport(gridSource, new File(to), sms);
- }
- else{
- download(gridSource,new File(to),sms);
- }
- }
- }
-
- protected void performDirectoryExport(GridFileType directory, File targetDirectory, StorageClient sms)throws Exception{
- if(!targetDirectory.exists()|| !targetDirectory.canWrite()){
- throw new IOException("Target directory <"+to+"> does not exist or is not writable!");
- }
- if(!targetDirectory.isDirectory()){
- throw new IOException("Target <"+to+"> is not a directory!");
- }
- GridFileType[]gridFiles=sms.listDirectory(directory.getPath());
- for(GridFileType file: gridFiles){
- if(file.getIsDirectory()){
- if(!recurse) {
- System.out.println("Skipping directory "+file.getPath());
- continue;
- }
- else{
- File newTargetDirectory=new File(targetDirectory,getName(file.getPath()));
- boolean success=newTargetDirectory.mkdirs();
- if(!success)throw new IOException("Can create directory: "+newTargetDirectory.getAbsolutePath());
- performDirectoryExport(file, newTargetDirectory, sms);
- continue;
- }
- }
- download(file, new File(targetDirectory,getName(file.getPath())), sms);
- }
- }
-
- protected void performWildCardExport(StorageClient sms)throws Exception{
- String dir=getDir(from);
- if(dir==null)dir="/";
- GridFileType[] files=sms.find(dir, false, from, false, null, null);
- File targetDir=targetStream==null?new File(to):null;
- if(targetStream==null){
- if(!targetDir.isDirectory())throw new IOException("Target is not a directory.");
- }
- for(GridFileType f: files){
- download(f, targetDir, sms);
- }
- }
-
- private String getDir(String path){
- return new File(path).getParent();
- }
-
- private String getName(String path){
- return new File(path).getName();
- }
-
- /**
- * download a single regular file
- *
- * @param source - grid file descriptor
- * @param localFile - local file or directory to write to
- * @param sms
- * @throws Exception
- */
- private void download(GridFileType source, File localFile, StorageClient sms)throws Exception{
- if(source==null || source.getIsDirectory()){
- throw new IllegalStateException("Source="+source);
- }
-
- OutputStream os=targetStream!=null?targetStream:null;
- FileTransferClient ftc=null;
- try{
- String path=source.getPath();
- if(targetStream==null){
- if(localFile.isDirectory()){
- localFile=new File(localFile,getName(source.getPath()));
- }
- if(mode.equals(Mode.nooverwrite) && localFile.exists()){
- System.out.println("File exists and creation mode was set to 'nooverwrite'.");
- return;
- }
- System.out.println("Downloading remote file '"+sms.getUrl()+"#/"+path+"' -> "+localFile.getAbsolutePath());
- os=new FileOutputStream(localFile.getAbsolutePath(), mode.equals(Mode.append));
- }
-
- chosenProtocol=sms.findSupportedProtocol(preferredProtocols.toArray(new ProtocolType.Enum[preferredProtocols.size()]));
- Map<String,String>extraParameters=makeExtraParameters(chosenProtocol);
- ftc=sms.getExport(path,extraParameters,chosenProtocol);
- configure(ftc, extraParameters);
- System.out.println("DEB:File transfer URL : "+ftc.getUrl());
-// ProgressBar p=null;
- if(ftc instanceof IMonitorable && showProgress){
- long size=ftc.getSourceFileSize();
- if(isRange()){
- size=getRangeSize();
- }
-// p=new ProgressBar(localFile.getName(),size,msg);
-// ((IMonitorable) ftc).setProgressListener(p);
- }
- long startTime=System.currentTimeMillis();
- if(isRange()){
- if(!(ftc instanceof SupportsPartialRead)){
- throw new Exception("Byte range is defined but protocol does not allow " +
- "partial read! Please choose a different protocol!");
- }
- System.out.println("Byte range: "+startByte+" - "+(getRangeSize()>0?endByte:""));
- SupportsPartialRead pReader=(SupportsPartialRead)ftc;
- pReader.readPartial(startByte, endByte-startByte+1, os);
- }
- else{
- ftc.readAllData(os);
- }
-// if(p!=null){
-// p.finish();
-// }
- if(timing){
- long duration=System.currentTimeMillis()-startTime;
- double rate=(double)localFile.length()/(double)duration;
- System.out.println("Rate: " +rate+ " kB/sec.");
- }
- if(targetStream==null)copyProperties(source, localFile);
- }
- finally{
- try{
- if(targetStream==null && os!=null){
- os.close();
- }
- }catch(Exception ignored){}
- if(ftc!=null){
- try{
- ftc.destroy();
- }catch(Exception e1){
-// System.out.println("Could not destroy the filetransfer client",e1);
- }
- }
- }
- }
-
- /**
- * if possible, copy the remote executable flag to the local file
- * @throws Exception
- */
- private void copyProperties(GridFileType source, File localFile)throws Exception{
- try{
- localFile.setExecutable(source.getPermissions().getExecutable());
- }
- catch(Exception ex){
- //TODO: logging
-// ("Can't set 'executable' flag for "+localFile.getName(), ex);
- }
- }
-
- private void configure(FileTransferClient ftc, Map<String,String>params){
- if(ftc instanceof UFTPFileTransferClient){
- UFTPFileTransferClient u=(UFTPFileTransferClient)ftc;
- String secret=params.get(UFTPConstants.PARAM_SECRET);
- u.setSecret(secret);
- }
- }
-
- public void setShowProgress(boolean showProgress) {
- this.showProgress = showProgress;
- }
-
- public void setForceFileOnly(boolean forceFileOnly) {
- this.forceFileOnly = forceFileOnly;
- }
-
- public void setTargetStream(OutputStream targetStream) {
- this.targetStream = targetStream;
- }
-
-}
[05/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileTransferBase.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileTransferBase.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileTransferBase.java
deleted file mode 100644
index 44988bf..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileTransferBase.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.regex.Pattern;
-
-import org.unigrids.services.atomic.types.GridFileType;
-import org.unigrids.services.atomic.types.ProtocolType;
-
-import de.fzj.unicore.uas.client.StorageClient;
-import de.fzj.unicore.uas.util.PropertyHelper;
-public class FileTransferBase {
-
- protected Properties extraParameterSource;
-
- protected boolean timing=false;
-
- protected boolean recurse=false;
-
- protected String from;
-
- protected String to;
-
- //index of first byte to download
- protected Long startByte;
-
- //index of last byte to download
- protected Long endByte;
-
- /**
- * the creation mode
- */
- protected Mode mode;
-
- /**
- * whether the job processing should fail if an error occurs
- */
- protected boolean failOnError;
-
- protected List<ProtocolType.Enum> preferredProtocols=new ArrayList<ProtocolType.Enum>();
-
- public FileTransferBase(){
- preferredProtocols.add(ProtocolType.BFT);
- }
-
- protected Map<String,String>makeExtraParameters(ProtocolType.Enum protocol){
- Map<String, String> res;
- if(extraParameterSource==null){
- res=new HashMap<String, String>();
- }
- else{
- String p=String.valueOf(protocol);
- PropertyHelper ph=new PropertyHelper(extraParameterSource, new String[]{p,p.toLowerCase()});
- res= ph.getFilteredMap();
- }
- if(res.size()>0){
- // TODO: change it to logger
- System.out.println("Have "+res.size()+" extra parameters for protocol "+protocol);
- }
- return res;
- }
-
-
- public String getTo() {
- return to;
- }
-
- public String getFrom() {
- return from;
- }
-
- public void setTo(String to) {
- this.to = to;
- }
-
- public void setFrom(String from) {
- this.from = from;
- }
-
- public Mode getMode() {
- return mode;
- }
-
- public boolean isFailOnError() {
- return failOnError;
- }
-
- public boolean isTiming() {
- return timing;
- }
-
- public void setTiming(boolean timing) {
- this.timing = timing;
- }
-
- public void setFailOnError(boolean failOnError) {
- this.failOnError = failOnError;
- }
-
- public List<ProtocolType.Enum> getPreferredProtocols() {
- return preferredProtocols;
- }
-
- public void setPreferredProtocols(List<ProtocolType.Enum> preferredProtocols) {
- this.preferredProtocols = preferredProtocols;
- }
-
- public void setExtraParameterSource(Properties properties){
- this.extraParameterSource=properties;
- }
-
- public void setRecurse(boolean recurse) {
- this.recurse = recurse;
- }
- /**
- * check if the given path denotes a valid remote directory
- * @param remotePath - the path
- * @param sms - the storage
- * @return <code>true</code> if the remote directory exists and is a directory
- */
- protected boolean isValidDirectory(String remotePath, StorageClient sms){
- boolean result=false;
- if(! ("/".equals(remotePath) || ".".equals(remotePath)) ){
- try{
- GridFileType gft=sms.listProperties(remotePath);
- result=gft.getIsDirectory();
- }catch(Exception ex){
- result=false;
- }
- }
- else result=true;
-
- return result;
- }
-
- public File[] resolveWildCards(File original){
- final String name=original.getName();
- if(!hasWildCards(original))return new File[]{original};
- File parent=original.getParentFile();
- if(parent==null)parent=new File(".");
- FilenameFilter filter=new FilenameFilter(){
- Pattern p=createPattern(name);
- public boolean accept(File file, String name){
- return p.matcher(name).matches();
- }
- };
- return parent.listFiles(filter);
- }
-
- protected boolean hasWildCards(File file){
- return hasWildCards(file.getName());
- }
-
- public boolean hasWildCards(String name){
- return name.contains("*") || name.contains("?");
- }
-
- private Pattern createPattern(String nameWithWildcards){
- String regex=nameWithWildcards.replace("?",".").replace("*", ".*");
- return Pattern.compile(regex);
- }
-
- protected ProtocolType.Enum chosenProtocol=null;
-
- public ProtocolType.Enum getChosenProtocol(){
- return chosenProtocol;
- }
-
- public Long getStartByte() {
- return startByte;
- }
-
- public void setStartByte(Long startByte) {
- this.startByte = startByte;
- }
-
- public Long getEndByte() {
- return endByte;
- }
-
- public void setEndByte(Long endByte) {
- this.endByte = endByte;
- }
-
- /**
- * checks if a byte range is defined
- * @return <code>true</code> iff both startByte and endByte are defined
- */
- protected boolean isRange(){
- return startByte!=null && endByte!=null;
- }
-
- /**
- * get the number of bytes in the byte range, or "-1" if the range is open-ended
- * @return
- */
- protected long getRangeSize(){
- if(Long.MAX_VALUE==endByte)return -1;
- return endByte-startByte;
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileUploader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileUploader.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileUploader.java
deleted file mode 100644
index fdc3503..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/FileUploader.java
+++ /dev/null
@@ -1,245 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Map;
-
-import org.unigrids.services.atomic.types.ProtocolType;
-
-import de.fzj.unicore.uas.client.FileTransferClient;
-import de.fzj.unicore.uas.client.StorageClient;
-import de.fzj.unicore.uas.client.UFTPConstants;
-import de.fzj.unicore.uas.client.UFTPFileTransferClient;
-import de.fzj.unicore.uas.fts.FiletransferOptions.IMonitorable;
-
-/**
- * upload local file(s) to a remote location
- *
- * @author schuller
- */
-public class FileUploader extends FileTransferBase{
-
- public FileUploader(String from, String to, Mode mode)throws FileNotFoundException{
- this(from,to,mode,true);
- }
-
- public FileUploader(String from, String to, Mode mode, boolean failOnError)throws FileNotFoundException{
- this.to=to;
- this.from=from;
- this.mode=mode;
- this.failOnError=failOnError;
- checkOK();
- }
-
- public String getFrom() {
- return from;
- }
-
- public String getTo() {
- return to;
- }
-
-
- public void perform(StorageClient sms)throws Exception{
- File fileSpec=new File(from);
- boolean hasWildCards=false;
- boolean isDirectory=fileSpec.isDirectory();
- File[] fileset=null;
-
- if(!isDirectory){
- hasWildCards=hasWildCards(fileSpec);
- }
-
- chosenProtocol=sms.findSupportedProtocol(preferredProtocols.toArray(new ProtocolType.Enum[preferredProtocols.size()]));
- Map<String,String>extraParameters=makeExtraParameters(chosenProtocol);
-
- if(!hasWildCards && !isDirectory){
- //single regular file
- uploadFile(fileSpec,to,sms,chosenProtocol,extraParameters);
- return;
- }
-
- //handle wildcards or directory
- if(hasWildCards){
- fileset=resolveWildCards(fileSpec);
- }
- else{
- fileset=fileSpec.listFiles();
- }
-
- if(!isValidDirectory(to, sms)){
- throw new IOException("The specified remote target '"+to+"' is not a directory");
- }
- if(to==null)to="/";
- String target=isDirectory?to+"/"+fileSpec.getName():to;
- sms.createDirectory(target);
- uploadFiles(fileset,target,sms,chosenProtocol,extraParameters);
- }
-
- /**
- * upload a set of files to a remote directory (which must exist)
- *
- * @param files
- * @param remoteDirectory
- * @param sms
- * @param protocol
- * @param extraParameters
- * @param msg
- * @throws Exception
- */
- private void uploadFiles(File[]files, String remoteDirectory, StorageClient sms, ProtocolType.Enum protocol,
- Map<String,String>extraParameters)throws Exception{
- for(File localFile: files){
- String target=remoteDirectory+"/"+localFile.getName();
- if(localFile.isDirectory()){
- if(!recurse){
- System.out.println("Skipping directory "+localFile.getAbsolutePath());
- }else{
- File[] fileset=localFile.listFiles();
- sms.createDirectory(target);
- uploadFiles(fileset,target,sms,protocol,extraParameters);
- }
- }else{
- uploadFile(localFile,target,sms,protocol,extraParameters);
- }
- }
- }
-
- /**
- * uploads a single regular file
- *
- * @param localFile
- * @param remotePath
- * @param sms
- * @param protocol
- * @param extraParameters
- * @param msg
- * @throws Exception
- */
- private void uploadFile(File localFile, String remotePath, StorageClient sms, ProtocolType.Enum protocol,
- Map<String,String>extraParameters) throws Exception{
- long startTime=System.currentTimeMillis();
- FileInputStream is=null;
- FileTransferClient ftc=null;
- try{
- if(remotePath==null){
- remotePath="/"+localFile.getName();
- }
- else if(remotePath.endsWith("/")){
- remotePath+=localFile.getName();
- }
- System.out.println("Uploading local file '"+localFile.getAbsolutePath()+"' -> '"+sms.getUrl()+"#"+remotePath+"'");
- is=new FileInputStream(localFile.getAbsolutePath());
- boolean append=Mode.append.equals(mode);
- ftc=sms.getImport(remotePath, append, extraParameters, protocol);
- configure(ftc, extraParameters);
- if(append)ftc.setAppend(true);
- String url=ftc.getUrl();
- System.out.println("File transfer URL : "+url);
-// ProgressBar p=null;
- if(ftc instanceof IMonitorable){
- long size=localFile.length();
- if(isRange()){
- size=getRangeSize();
- }
-// p=new ProgressBar(localFile.getName(),size,msg);
-// ((IMonitorable) ftc).setProgressListener(p);
- }
- if(isRange()){
- System.out.println("Byte range: "+startByte+" - "+(getRangeSize()>0?endByte:""));
- long skipped=0;
- while(skipped<startByte){
- skipped+=is.skip(startByte);
- }
- ftc.writeAllData(is, endByte-startByte+1);
-
- }else{
- ftc.writeAllData(is);
- }
- copyProperties(localFile, sms, remotePath);
-
-// if(ftc instanceof IMonitorable){
-// p.finish();
-// }
-
- }finally{
- if(ftc!=null){
- try{
- ftc.destroy();
- }catch(Exception e1){
-// msg.error("Could not clean-up the filetransfer at <"+ftc.getUrl()+">",e1);
- }
- }
- try{ if(is!=null)is.close(); }catch(Exception ignored){}
- }
- if(timing){
- long duration=System.currentTimeMillis()-startTime;
- double rate=(double)localFile.length()/(double)duration;
- System.out.println("Rate: "+rate+ " kB/sec.");
- }
- }
-
- /**
- * if possible, copy the local executable flag to the remote file
- * @param sourceFile - local file
- * @throws Exception
- */
- private void copyProperties(File sourceFile, StorageClient sms, String target)throws Exception{
- boolean x=sourceFile.canExecute();
- try{
- if(x){
- sms.changePermissions(target, true, true, x);
- }
- }catch(Exception ex){
-// System.out.println("Can't set exectuable flag on remote file.",ex);
- }
- }
-
- private void checkOK()throws FileNotFoundException{
- if(!failOnError){
- return;
- }
- File orig=new File(from);
- if(!orig.isAbsolute()){
- orig=new File(System.getProperty("user.dir"),from);
- }
- File[] files=resolveWildCards(orig);
- if(files==null){
- throw new FileNotFoundException("Local import '"+from+"' does not exist.");
- }
- for(File f: files){
- if(!f.exists())throw new FileNotFoundException("Local import '"+from+"' does not exist.");
- }
- }
-
- private void configure(FileTransferClient ftc, Map<String,String>params){
- if(ftc instanceof UFTPFileTransferClient){
- UFTPFileTransferClient u=(UFTPFileTransferClient)ftc;
- String secret=params.get(UFTPConstants.PARAM_SECRET);
- u.setSecret(secret);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLGenerator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLGenerator.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLGenerator.java
deleted file mode 100644
index 058f2c4..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLGenerator.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-package org.apache.airavata.gfac.provider.utils;
-
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- *
- * Utility class generates a JSDL instance from JobExecutionContext instance
- * @author shahbaz memon
- *
- * */
-
-public class JSDLGenerator {
-
- protected final Logger log = LoggerFactory.getLogger(this.getClass());
-
-
- public synchronized static JobDefinitionDocument buildJSDLInstance(JobExecutionContext context) throws Exception {
-
- JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
- .newInstance();
- JobDefinitionType value = jobDefDoc.addNewJobDefinition();
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- // build Identification
- createJobIdentification(value, appDepType);
-
- ResourceProcessor.generateResourceElements(value, context);
-
- ApplicationProcessor.generateJobSpecificAppElements(value, context);
-
- DataStagingProcessor.generateDataStagingElements(value, context);
-
-
- return jobDefDoc;
- }
-
-
- public synchronized static JobDefinitionDocument buildJSDLInstance(JobExecutionContext context, String smsUrl) throws Exception {
-
- JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
- .newInstance();
- JobDefinitionType value = jobDefDoc.addNewJobDefinition();
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- // build Identification
- createJobIdentification(value, appDepType);
-
- ResourceProcessor.generateResourceElements(value, context);
-
- ApplicationProcessor.generateJobSpecificAppElements(value, context);
-
- UASDataStagingProcessor.generateDataStagingElements(value, context, smsUrl);
-
- return jobDefDoc;
- }
-
- private static void createJobIdentification(JobDefinitionType value, HpcApplicationDeploymentType appDepType){
- if( appDepType.getProjectAccount() != null ){
-
- if (appDepType.getProjectAccount().getProjectAccountNumber() != null)
- JSDLUtils.addProjectName(value, appDepType.getProjectAccount()
- .getProjectAccountNumber());
-
- if (appDepType.getProjectAccount().getProjectAccountDescription() != null)
- JSDLUtils.getOrCreateJobIdentification(value).setDescription(
- appDepType.getProjectAccount()
- .getProjectAccountDescription());
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLUtils.java
deleted file mode 100644
index 46203cf..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/JSDLUtils.java
+++ /dev/null
@@ -1,540 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-
-import javax.xml.namespace.QName;
-
-import org.apache.commons.httpclient.URIException;
-import org.apache.xmlbeans.XmlCursor;
-import org.apache.xmlbeans.XmlObject;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.BoundaryType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.CPUArchitectureType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.CandidateHostsType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.CreationFlagEnumeration;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.DataStagingType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.ExactType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDescriptionType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobIdentificationType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemTypeEnumeration;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemTypeType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.ProcessorArchitectureEnumeration;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.ResourcesType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.SourceTargetType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.POSIXApplicationDocument;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.POSIXApplicationType;
-import org.ggf.schemas.jsdl.x2006.x07.jsdlHpcpa.HPCProfileApplicationDocument;
-import org.ggf.schemas.jsdl.x2006.x07.jsdlHpcpa.HPCProfileApplicationType;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.SPMDApplicationDocument;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.SPMDApplicationType;
-
-
-/**
- *
- * @author shahbaz memon, bastian demuth
- *
- */
-public class JSDLUtils
-{
-
- public static final int FLAG_OVERWRITE = 1;
- public static final int FLAG_APPEND = 2;
- public static final int FLAG_DELETE_ON_TERMINATE = 32;
-
- public static final QName POSIX_APPLICATION=POSIXApplicationDocument.type.getDocumentElementName();
-
- public static final QName HPC_PROFILE_APPLICATION=HPCProfileApplicationDocument.type.getDocumentElementName();
-
- public static final QName SPMD_APPLICATION=SPMDApplicationDocument.type.getDocumentElementName();
-
- public static final String PROCESSESPERHOST = "ProcessesPerHost";
- public static final String NUMBEROFPROCESSES = "NumberOfProcesses";
- public static final String THREADSPERHOST = "ThreadsPerHost";
-
-
-
- public static EnvironmentType addEnvVariable(JobDefinitionType def,String name, String value) {
- POSIXApplicationType posixApp = getOrCreatePOSIXApplication(def);
- EnvironmentType newEnv = posixApp.addNewEnvironment();
- newEnv.setName(name);
- newEnv.setStringValue(value);
- return newEnv;
- }
-
- public static void setApplicationName(JobDefinitionType value, String applicationName) {
- getOrCreateApplication(value).setApplicationName(applicationName);
- }
-
- public static void setApplicationVersion(JobDefinitionType value, String applicationVersion) {
- getOrCreateApplication(value).setApplicationVersion(applicationVersion);
- }
-
- public static void addProjectName(JobDefinitionType value, String projectName) {
- getOrCreateJobIdentification(value).addNewJobProject().setStringValue(projectName);
- }
-
- public static void addMultipleProjectNames(JobDefinitionType value, String[] projectNames) {
- for (String name : projectNames) {
- getOrCreateJobIdentification(value).addNewJobProject().setStringValue(name);
- }
- }
-
- public static void addCandidateHost(JobDefinitionType value, String host) {
- getOrCreateCandidateHosts(value).addHostName(host);
-
- }
- public static void addDataStagingTargetElement(JobDefinitionType value, String fileSystem, String file, String uri) {
- addDataStagingTargetElement(value,fileSystem, file, uri, 0);
- }
-
- public static void addDataStagingTargetElement(JobDefinitionType value, String fileSystem, String file, String uri, int flags) {
- JobDescriptionType jobDescr = getOrCreateJobDescription(value);
- DataStagingType newDS = jobDescr.addNewDataStaging();
- CreationFlagEnumeration.Enum creationFlag = CreationFlagEnumeration.DONT_OVERWRITE;
- if((flags & FLAG_OVERWRITE) != 0) creationFlag = CreationFlagEnumeration.OVERWRITE;
- if((flags & FLAG_APPEND) != 0) creationFlag = CreationFlagEnumeration.APPEND;
- boolean deleteOnTerminate = (flags & FLAG_DELETE_ON_TERMINATE) != 0;
- newDS.setCreationFlag(creationFlag);
- newDS.setDeleteOnTermination(deleteOnTerminate);
- SourceTargetType target = newDS.addNewTarget();
-
- try {
- uri = (uri == null) ? null : URIUtils.encodeAll(uri);
- } catch (URIException e) {
- }
- target.setURI(uri);
- newDS.setFileName(file);
- if (fileSystem != null && !fileSystem.equals("Work")) { //$NON-NLS-1$
- newDS.setFilesystemName(fileSystem);
- }
- }
-
- public static void addDataStagingSourceElement(JobDefinitionType value, String uri, String fileSystem, String file) {
- addDataStagingSourceElement(value, uri, fileSystem, file, 0);
- }
-
- public static void addDataStagingSourceElement(JobDefinitionType value, String uri, String fileSystem, String file, int flags) {
- JobDescriptionType jobDescr = getOrCreateJobDescription(value);
-
- try {
- uri = (uri == null) ? null : URIUtils.encodeAll(uri);
- } catch (URIException e) {
- }
- DataStagingType newDS = jobDescr.addNewDataStaging();
- CreationFlagEnumeration.Enum creationFlag = CreationFlagEnumeration.DONT_OVERWRITE;
- if((flags & FLAG_OVERWRITE) != 0) creationFlag = CreationFlagEnumeration.OVERWRITE;
- if((flags & FLAG_APPEND) != 0) creationFlag = CreationFlagEnumeration.APPEND;
- boolean deleteOnTerminate = (flags & FLAG_DELETE_ON_TERMINATE) != 0;
- newDS.setCreationFlag(creationFlag);
- newDS.setDeleteOnTermination(deleteOnTerminate);
- SourceTargetType source = newDS.addNewSource();
- source.setURI(uri);
- newDS.setFileName(file);
- if (fileSystem != null && !fileSystem.equals("Work")) { //$NON-NLS-1$
- newDS.setFilesystemName(fileSystem);
- }
- }
-
-
- public static ApplicationType getOrCreateApplication(JobDefinitionType value) {
- JobDescriptionType jobDescr = getOrCreateJobDescription(value);
- if (!jobDescr.isSetApplication()) {
- jobDescr.addNewApplication();
- }
- return jobDescr.getApplication();
- }
-
- public static CandidateHostsType getOrCreateCandidateHosts(JobDefinitionType value) {
- ResourcesType resources = getOrCreateResources(value);
- if (!resources.isSetCandidateHosts()) {
- resources.addNewCandidateHosts();
- }
- return resources.getCandidateHosts();
- }
-
- public static CPUArchitectureType getOrCreateCPUArchitecture(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if (!jobResources.isSetCPUArchitecture()) {
- jobResources.addNewCPUArchitecture();
- }
- return jobResources.getCPUArchitecture();
- }
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUCount(JobDefinitionType value) {
- ResourcesType jobResources = getOrCreateResources(value);
- if (!jobResources.isSetIndividualCPUCount()) {
- jobResources.addNewIndividualCPUCount();
- }
- return jobResources.getIndividualCPUCount();
- }
-
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUSpeed(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if (!jobResources.isSetIndividualCPUSpeed()) {
- jobResources.addNewIndividualCPUSpeed();
- }
- return jobResources.getIndividualCPUSpeed();
- }
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUTime(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if ( !jobResources.isSetIndividualCPUTime() ) {
- jobResources.addNewIndividualCPUTime();
- }
- return jobResources.getIndividualCPUTime();
- }
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualDiskSpace(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if (!jobResources.isSetIndividualDiskSpace()) {
- jobResources.addNewIndividualDiskSpace();
- }
- return jobResources.getIndividualDiskSpace();
- }
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualPhysicalMemory(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if (!jobResources.isSetIndividualPhysicalMemory()) {
- jobResources.addNewIndividualPhysicalMemory();
- }
- return jobResources.getIndividualPhysicalMemory();
- }
-
- public static JobDescriptionType getOrCreateJobDescription(JobDefinitionType value) {
- if (value.getJobDescription() == null) {
- return value.addNewJobDescription();
- }
- return value.getJobDescription();
- }
-
- public static JobIdentificationType getOrCreateJobIdentification(JobDefinitionType value) {
- JobDescriptionType descr = getOrCreateJobDescription(value);
- if (descr.getJobIdentification() == null) {
- return descr.addNewJobIdentification();
- }
- return descr.getJobIdentification();
- }
-
- public static OperatingSystemType getOrCreateOperatingSystem(JobDefinitionType value)
- {
- ResourcesType jobResources = getOrCreateResources(value);
- if(!jobResources.isSetOperatingSystem()) {
- jobResources.addNewOperatingSystem();
- }
- return jobResources.getOperatingSystem();
- }
-
- public static ResourcesType getOrCreateResources(JobDefinitionType value) {
- JobDescriptionType jobDescr = getOrCreateJobDescription(value);
- if (!jobDescr.isSetResources()) {
- jobDescr.addNewResources();
- }
- return jobDescr.getResources();
- }
-
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateTotalCPUCount(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if ( !jobResources.isSetTotalCPUCount() ) {
- jobResources.addNewTotalCPUCount();
- }
- return jobResources.getTotalCPUCount();
- }
-
-
- public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateTotalResourceCount(JobDefinitionType value) {
-
- ResourcesType jobResources = getOrCreateResources(value);
- if ( !jobResources.isSetTotalResourceCount())
- {
- jobResources.addNewTotalResourceCount();
- }
- return jobResources.getTotalResourceCount();
- }
-
- public static POSIXApplicationType getOrCreatePOSIXApplication(JobDefinitionType value) {
-
- ApplicationType application = getOrCreateApplication(value);
-
- if(getHPCProfileApplication(value) != null){
- //TODO handle: not creating POSIX element if HPCProfile already exists
- return getPOSIXApplication(value);
- }
-
- if (getPOSIXApplication(value) == null) {
- XmlCursor acursor = application.newCursor();
- acursor.toEndToken();
- acursor.insertElement(POSIX_APPLICATION);
- acursor.dispose();
- }
- return getPOSIXApplication(value);
- }
-
-
- public static SPMDApplicationType getOrCreateSPMDApplication(JobDefinitionType value) {
-
- ApplicationType application = getOrCreateApplication(value);
-
- if (getSPMDApplication(value) == null) {
- XmlCursor acursor = application.newCursor();
- acursor.toEndToken();
- acursor.insertElement(SPMD_APPLICATION);
- acursor.dispose();
- }
- return getSPMDApplication(value);
- }
-
- public static SPMDApplicationType getSPMDApplication(JobDefinitionType value) {
- if (value != null &&
- value.getJobDescription() != null &&
- value.getJobDescription().isSetApplication() ) {
- XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
- if (acursor.toFirstChild()) {
- do {
- if(acursor.getName().equals(SPMD_APPLICATION)) {
- XmlObject result = acursor.getObject();
- acursor.dispose();
- return (SPMDApplicationType) result;
- }
- } while (acursor.toNextSibling());
- acursor.dispose();
- return null;
- } else {
- acursor.dispose();
- return null;
- }
- } else {
- return null;
- }
- }
-
-
-
- public static POSIXApplicationType getPOSIXApplication(JobDefinitionType value) {
- if (value != null &&
- value.getJobDescription() != null &&
- value.getJobDescription().isSetApplication() ) {
- XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
- if (acursor.toFirstChild()) {
- do {
- if(acursor.getName().equals(POSIX_APPLICATION)) {
- XmlObject result = acursor.getObject();
- acursor.dispose();
- return (POSIXApplicationType) result;
- }
- } while (acursor.toNextSibling());
- acursor.dispose();
- return null;
- } else {
- acursor.dispose();
- return null;
- }
- } else {
- return null;
- }
- }
-
-
-
- public static HPCProfileApplicationType getOrCreateHPCProfileApplication(JobDefinitionType value) {
-
- ApplicationType application = getOrCreateApplication(value);
-
- if(getPOSIXApplication(value) != null){
- //TODO handle: creating HPC element if POSIX already exists
- return getHPCProfileApplication(value);
- }
-
- if (getHPCProfileApplication(value) == null) {
- XmlCursor acursor = application.newCursor();
- acursor.toEndToken();
- acursor.insertElement(HPC_PROFILE_APPLICATION);
- acursor.dispose();
- }
- return getHPCProfileApplication(value);
- }
-
-
- public static HPCProfileApplicationType getHPCProfileApplication(JobDefinitionType value) {
- if (value != null &&
- value.getJobDescription() != null &&
- value.getJobDescription().isSetApplication() ) {
- XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
- if (acursor.toFirstChild()) {
- do {
- if(acursor.getName().equals(HPC_PROFILE_APPLICATION)) {
- XmlObject result = acursor.getObject();
- acursor.dispose();
- return (HPCProfileApplicationType) result;
- }
- } while (acursor.toNextSibling());
- acursor.dispose();
- return null;
- } else {
- acursor.dispose();
- return null;
- }
- } else {
- return null;
- }
- }
-
-
-
-
- public static RangeValueType getTotalCPUCountRequirements(JobDefinitionType value) {
- if(value != null && value.getJobDescription() != null && value.getJobDescription().isSetResources() &&
- value.getJobDescription().getResources().isSetTotalCPUCount()){
- return toU6RangeValue(value.getJobDescription().getResources().getTotalCPUCount());
- }
- else
- return null;
- }
-
- public static RangeValueType getTotalResourceCountRequirements(JobDefinitionType value) {
- if(value != null && value.getJobDescription() != null && value.getJobDescription().isSetResources() &&
- value.getJobDescription().getResources().isSetTotalResourceCount()){
- return toU6RangeValue(value.getJobDescription().getResources().getTotalResourceCount());
- }
- else
- return null;
- }
-
-
- public static RangeValueType toU6RangeValue(org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType jsdlType) {
- RangeValueType result = new RangeValueType();
- if(jsdlType.getExactArray().length > 0){
- result.setExact(jsdlType.getExactArray(0).getDoubleValue());
- }
- if(jsdlType.isSetLowerBoundedRange()){
- result.setLowerBound(jsdlType.getLowerBoundedRange().getDoubleValue());
- }
- if(jsdlType.isSetUpperBoundedRange()){
- result.setUpperBound(jsdlType.getUpperBoundedRange().getDoubleValue());
- }
- return result;
- }
-
-
-
- public static void setCPUArchitectureRequirements(JobDefinitionType value, ProcessorRequirement cpuArchitecture) {
- if(cpuArchitecture == null || cpuArchitecture.getValue() == null) return;
- CPUArchitectureType cpuArch = getOrCreateCPUArchitecture(value);
- cpuArch.setCPUArchitectureName(ProcessorArchitectureEnumeration.Enum.forString(cpuArchitecture.getValue()));
- }
-
- public static void setIndividualCPUCountRequirements(JobDefinitionType value, RangeValueType cpuCount) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualCPUCount = getOrCreateIndividualCPUCount(value);
- setRangeValue(cpuCount, individualCPUCount);
- }
-
- public static void setIndividualCPUSpeedRequirements(JobDefinitionType value, RangeValueType cpuSpeed) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualCPUSpeed = getOrCreateIndividualCPUSpeed(value);
- setRangeValue(cpuSpeed, individualCPUSpeed);
- }
-
- public static void setIndividualCPUTimeRequirements(JobDefinitionType value, RangeValueType cpuTime) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType cpuIndividualTime = getOrCreateIndividualCPUTime(value);
- setRangeValue(cpuTime, cpuIndividualTime);
- }
-
- public static void setIndividualDiskSpaceRequirements(JobDefinitionType value, RangeValueType diskSpace) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualDiskSpace = getOrCreateIndividualDiskSpace(value);
- setRangeValue(diskSpace, individualDiskSpace);
- }
-
- public static void setIndividualPhysicalMemoryRequirements(JobDefinitionType value, RangeValueType physicalMemory) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualPhysicalMemory = getOrCreateIndividualPhysicalMemory(value);
- setRangeValue(physicalMemory, individualPhysicalMemory);
- }
-
-
- public static void setName(JobDefinitionType value, String name) {
- getOrCreateJobIdentification(value).setJobName(name);
- }
-
- public static void setOperatingSystemRequirements(JobDefinitionType value, OSRequirement osType) {
- if(osType == null || osType.getOSType() == null) return;
- OperatingSystemType os_Type = getOrCreateOperatingSystem(value);
- OperatingSystemTypeType ostt = os_Type.addNewOperatingSystemType();
- ostt.setOperatingSystemName(OperatingSystemTypeEnumeration.Enum.forString(osType.getOSType().getValue()));
- if(osType.getOSVersion() != null)
- {
- os_Type.setOperatingSystemVersion(osType.getOSVersion());
- }
- }
-
- public static void setRangeValue(RangeValueType u6Type, org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType jsdlType) {
- Double exact = u6Type.getExact();
- Double epsilon = u6Type.getEpsilon();
- Double lower = u6Type.getLowerBound();
- Double upper = u6Type.getUpperBound();
-
-
- if(lower.isNaN() && upper.isNaN())
- {
- ExactType exactType = jsdlType.getExactArray().length > 0 ? jsdlType.getExactArray(0) : jsdlType.addNewExact();
- exactType.setDoubleValue(exact);
- if(!epsilon.isNaN() && epsilon != 0)
- {
- exactType.setEpsilon(epsilon);
- }
- }
- else
- {
- if(!lower.isNaN())
- {
- BoundaryType lowerBound = jsdlType.isSetLowerBoundedRange() ? jsdlType.getLowerBoundedRange() : jsdlType.addNewLowerBoundedRange();
- lowerBound.setDoubleValue(lower);
- lowerBound.setExclusiveBound(!u6Type.isIncludeLowerBound());
- }
-
- if(!upper.isNaN())
- {
- BoundaryType upperBound = jsdlType.isSetUpperBoundedRange() ? jsdlType.getUpperBoundedRange() : jsdlType.addNewUpperBoundedRange();
- upperBound.setDoubleValue(upper);
- upperBound.setExclusiveBound(!u6Type.isIncludeUpperBound());
- }
- }
- }
-
- public static void setTotalCPUCountRequirements(JobDefinitionType value, RangeValueType cpuCount) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType cpuTotalCount = getOrCreateTotalCPUCount(value);
- setRangeValue(cpuCount, cpuTotalCount);
- }
-
- public static void setTotalResourceCountRequirements(JobDefinitionType value, RangeValueType resourceCount) {
- org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType totalCount = getOrCreateTotalResourceCount(value);
- setRangeValue(resourceCount, totalCount);
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/Mode.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/Mode.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/Mode.java
deleted file mode 100644
index 80cd766..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/Mode.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
/**
 * File creation modes controlling how an existing target file is treated.
 */
public enum Mode {

    /** Replace the target file if it already exists. */
    overwrite,

    /** Add new content to the end of an existing file. */
    append,

    /** Fail the operation instead of touching an existing file. */
    nooverwrite
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSRequirement.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSRequirement.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSRequirement.java
deleted file mode 100644
index 9f4cffd..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSRequirement.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-public class OSRequirement implements ResourceRequirement{
- private OSType osType;
- private String version;
- protected boolean enabled;
-
-
- public OSRequirement() {
- }
-
- /**
- *
- * @param type -
- * the type of the O/S
- * @param version -
- * the version of the O/S
- */
- public OSRequirement(OSType osType, String osVersion) {
- setOSType(osType);
- setOSVersion(osVersion);
- }
-
- /**
- * Set the type of the O/S
- *
- * @param type -
- * the type of the O/S
- */
- public void setOSType(OSType osType) {
- this.osType = osType;
- }
-
- /**
- * Get the type of the O/S
- *
- * @return the type of the O/S
- */
- public OSType getOSType() {
- return osType;
- }
-
- /**
- * Set the version of the O/S
- *
- * @param version -
- * the version of the O/S
- */
- public void setOSVersion(String version) {
- this.version = version;
- }
-
- /**
- * Get the version of the O/S
- *
- * @return the version of the O/S
- */
- public String getOSVersion() {
- return version;
- }
-
- /**
- *
- * equals this instance of class with another instance
- */
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj==null || getClass() != obj.getClass()) return false;
- final OSRequirement other = (OSRequirement) obj;
- boolean typeEqual = osType == null ? other.osType == null : osType.equals(other.osType);
- boolean versionEqual = version == null ? other.version == null : version.equals(other.version);
- return typeEqual && versionEqual && isEnabled() == other.isEnabled();
- }
-
-
-
- public boolean isEnabled() {
- return enabled;
- }
-
- public void setEnabled(boolean enabled) {
- this.enabled = enabled;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSType.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSType.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSType.java
deleted file mode 100644
index 250e9b7..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/OSType.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
/**
 * Operating-system identifiers used in JSDL resource descriptions.
 * Each constant carries the literal string value used on the wire.
 */
public enum OSType {

    unknown("Unknown"),
    linux("LINUX"),
    mac_os("MACOS"),
    win95("WIN95"),
    win98("WIN98"),
    windows_R_Me("Windows_R_Me"),
    winNT("WINNT"),
    windows_2000("Windows_2000"),
    windows_XP("Windows_XP"),
    msdos("MSDOS"),
    solaris("Solaris"),
    sunOS("SunOS"),
    freeBSD("FreeBSD"),
    netBSD("NetBSD"),
    openBSD("OpenBSD"),
    bsdunix("BSDUNIX"),
    aix("AIX"),
    z_OS("z_OS"),
    os_2("OS_2"),
    os9("OS9"),
    netWare("NetWare"),
    tru64_unix("Tru64_UNIX"),
    irix("IRIX"),
    osf("OSF"),
    mvs("MVS"),
    os400("OS400"),
    javaVM("JavaVM"),
    win3x("WIN3x"),
    winCE("WINCE"),
    NCR3000("NCR3000"),
    dc_os("DC_OS"),
    reliant_unix("Reliant_UNIX"),
    sco_unixWare("SCO_UnixWare"),
    sco_openServer("SCO_OpenServer"),
    sequent("Sequent"),
    u6000("U6000"),
    aseries("ASERIES"),
    tandemNSK("TandemNSK"),
    tandemNT("TandemNT"),
    bs2000("BS2000"),
    lynx("Lynx"),
    xenix("XENIX"),
    vm("VM"),
    interactive_unix("Interactive_UNIX"),
    gnu_hurd("GNU_Hurd"),
    mach_kernel("MACH_Kernel"),
    inferno("Inferno"),
    qnx("QNX"),
    epoc("EPOC"),
    ixWorks("IxWorks"),
    vxWorks("VxWorks"),
    mint("MiNT"),
    beOS("BeOS"),
    hp_mpe("HP_MPE"),
    nextStep("NextStep"),
    palmPilot("PalmPilot"),
    rhapsody("Rhapsody"),
    dedicated("Dedicated"),
    os_390("OS_390"),
    vse("VSE"),
    tpf("TPF"),
    caldera_open_unix("Caldera_Open_UNIX"),
    attunix("ATTUNIX"),
    dgux("DGUX"),
    decnt("DECNT"),
    openVMS("OpenVMS"),
    hpux("HPUX"),
    other("other");

    /** The wire-format string for this O/S type. */
    private final String value;

    OSType(String value) {
        this.value = value;
    }

    /** Returns the wire-format string for this O/S type. */
    public String getValue() {
        return value;
    }

    /**
     * Looks up the constant whose wire-format string equals {@code value}.
     * Returns {@code null} when no constant matches; note this does NOT fall
     * back to the catch-all {@code other} constant.
     */
    public static OSType fromString(String value) {
        for (OSType candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return value;
    }
}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ProcessorRequirement.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ProcessorRequirement.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ProcessorRequirement.java
deleted file mode 100644
index 124bdd0..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ProcessorRequirement.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-
-package org.apache.airavata.gfac.provider.utils;
-
/**
 * CPU architecture identifiers used in JSDL resource requirements.
 * Each constant carries its literal wire-format string.
 */
public enum ProcessorRequirement {

    sparc("sparc"),
    powerpc("powerpc"),
    x86("x86"),
    x86_32("x86_32"),
    x86_64("x86_64"),
    parisc("parisc"),
    mips("mips"),
    ia64("ia64"),
    arm("arm"),
    other("other");

    /** Wire-format string for this architecture. */
    private final String value;

    ProcessorRequirement(String value) {
        this.value = value;
    }

    /** Returns the wire-format string for this architecture. */
    public String getValue() {
        return value;
    }

    /**
     * Looks up the constant matching {@code value}; any unmatched string
     * maps to the catch-all {@link #other}.
     */
    public static ProcessorRequirement fromString(String value) {
        for (ProcessorRequirement candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        return other;
    }

    @Override
    public String toString() {
        return value;
    }
}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/RangeValueType.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/RangeValueType.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/RangeValueType.java
deleted file mode 100644
index 1a9c325..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/RangeValueType.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-public class RangeValueType implements ResourceRequirement {
-
-
- private double exact = Double.NaN;
- private double lowerBound = Double.NEGATIVE_INFINITY;
- private double upperBound = Double.POSITIVE_INFINITY;
-
- private double epsilon = Double.NaN;
- private boolean includeLowerBound = true;
- private boolean includeUpperBound = true;
-
- private boolean enabled = false;
-
-
- public RangeValueType(double exact, double epsilon, double lowerBound, boolean includeLowerBound, double upperBound, boolean includeUpperBound, boolean enabled) {
- this.exact = exact;
- this.epsilon = epsilon;
- this.lowerBound = lowerBound;
- this.includeLowerBound = includeLowerBound;
- this.upperBound = upperBound;
- this.includeUpperBound = includeUpperBound;
- this.enabled = enabled;
- }
-
-
-
- /**
- * Create the range requirements
- *
- * @param exact -
- * the exact value
- * @param lowerBound -
- * the lower bound
- * @param upperBound -
- * the upper bound
- * @param includelowerBound -
- * true, if lowerBound should be included in range
- * @param includeUpperBound -
- * true, if upperBound should be included in range
- *
- */
- public RangeValueType(double exact, double epsilon, double lowerBound, boolean includeLowerBound, double upperBound, boolean includeUpperBound) {
- this(exact,epsilon,lowerBound,includeLowerBound,upperBound,includeUpperBound,false);
-
- }
-
-
- /**
- * Create the range requirements
- *
- * @param exact -
- * the exact value
- * @param lowerBound -
- * the lower bound
- * @param upperBound -
- * the upper bound
- */
- public RangeValueType(double exact, double epsilon, double lowerBound, double upperBound) {
- this(exact,epsilon,lowerBound,true,upperBound,true);
- }
-
-
- public RangeValueType(double exact, double lowerBound, double upperBound) {
- this(exact,Double.NaN,lowerBound,true,upperBound,true);
- }
-
- /**
- * Create the exact requirements
- *
- * @param exact -
- * the exact value
- * @param epsilon -
- * the epsilon arround exact
- *
- */
- public RangeValueType(double exact, double epsilon) {
- this(exact,epsilon,Double.NaN,Double.NaN);
- }
-
-
- /**
- * Create the exact requirements
- *
- * @param exact -
- * the exact value
- */
- public RangeValueType(double exact) {
- this(exact,Double.NaN);
- }
-
- public RangeValueType() {
- }
-
- /**
- * Get exact requirements
- *
- * @return the exact requirements
- */
- public double getExact() {
- return exact;
- }
-
- /**
- * Set exact requirements
- *
- * @param exact -
- * the exact requirements
- */
- public void setExact(double exact) {
- this.exact = exact;
- }
-
- /**
- * Get epsilon
- *
- * @return the epsilon
- */
- public double getEpsilon() {
- return epsilon;
- }
-
- /**
- * Set epsilon
- *
- * @param epsilon -
- * epsilon belonging to to exact requirements
- */
- public void setEpsilon(double epsilon) {
- this.epsilon = epsilon;
- }
-
- /**
- * Get lower bound
- *
- * @return the lower bound
- */
- public double getLowerBound() {
- return lowerBound;
- }
-
- /**
- * Set lower bound
- *
- * @param lowerBound -
- * the lower bound
- */
- public void setLowerBound(double lowerBound) {
- this.lowerBound = lowerBound;
- }
-
- /**
- * Get upper bound
- *
- * @return the upper bound
- */
- public double getUpperBound() {
- return upperBound;
- }
-
- /**
- * Set upper bound
- *
- * @param upperBound -
- * the upper bound
- */
- public void setUpperBound(double upperBound) {
- this.upperBound = upperBound;
- }
-
- /**
- * Test if requirements are met
- *
- * @param value -
- * the tested value
- * @return <code>true</code> if value is in the range and not less than
- * the exact value
- */
- public boolean lowerThanDouble(double value) {
- return (value >= exact && value >= lowerBound && value <= upperBound) ? true : false;
- }
-
- public String toString() {
- if (lowerBound == Double.NEGATIVE_INFINITY && upperBound == Double.POSITIVE_INFINITY) {
- return Double.toString(exact);
- }
- else {
- return "(e=" + Double.toString(exact) + ",l=" + Double.toString(lowerBound) + ",u=" //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
- + Double.toString(upperBound) + ")"; //$NON-NLS-1$
- }
- }
-
-
- public boolean isIncludeLowerBound() {
- return includeLowerBound;
- }
-
-
- public void setIncludeLowerBound(boolean includeLowerBound) {
- this.includeLowerBound = includeLowerBound;
- }
-
-
- public boolean isIncludeUpperBound() {
- return includeUpperBound;
- }
-
-
- public void setIncludeUpperBound(boolean includeUpperBound) {
- this.includeUpperBound = includeUpperBound;
- }
-
- public RangeValueType clone(){
- return new RangeValueType(this.exact, this.epsilon, this.lowerBound, this.includeLowerBound, this.upperBound, this.includeUpperBound,this.enabled);
- }
-
-
-
- public boolean isEnabled() {
- return enabled;
- }
-
-
-
- public void setEnabled(boolean enabled) {
- this.enabled = enabled;
- }
-
-
- public boolean equals(Object o)
- {
- if(! (o instanceof RangeValueType)) return false;
- RangeValueType other = (RangeValueType) o;
- return doublesEqual(getExact(),other.getExact())
- && doublesEqual(getEpsilon(), other.getEpsilon())
- && doublesEqual(getLowerBound(), other.getLowerBound())
- && doublesEqual(getUpperBound(), other.getUpperBound())
- && isIncludeLowerBound() == other.isIncludeLowerBound()
- && isIncludeUpperBound() == other.isIncludeUpperBound()
- && isEnabled() == other.isEnabled();
- }
-
-
- private boolean doublesEqual(double a, double b)
- {
- Double A = new Double(a);
- Double B = new Double(b);
- return A.equals(B);
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ResourceProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ResourceProcessor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ResourceProcessor.java
deleted file mode 100644
index 61e713a..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/ResourceProcessor.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
-
-public class ResourceProcessor {
-
-
- public static void generateResourceElements(JobDefinitionType value, JobExecutionContext context) throws Exception{
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- createMemory(value, appDepType);
- TaskDetails taskData = context.getTaskData();
- if(taskData != null && taskData.isSetTaskScheduling()){
- ComputationalResourceScheduling computionResource= taskData.getTaskScheduling();
- try {
- int cpuCount = computionResource.getTotalCPUCount();
- if(cpuCount>0){
-// appDepType.setCpuCount(cpuCount);
- NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
- String processers = Integer.toString(cpuCount);
- num.setStringValue(processers);
- JSDLUtils.getOrCreateSPMDApplication(value).setNumberOfProcesses(num);
- }
- } catch (NullPointerException e) {
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- int nodeCount = computionResource.getNodeCount();
- if(nodeCount>0){
- appDepType.setNodeCount(nodeCount);
- }
- } catch (NullPointerException e) {
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- String queueName = computionResource.getQueueName();
- if (queueName != null) {
- if(appDepType.getQueue() == null){
- QueueType queueType = appDepType.addNewQueue();
- queueType.setQueueName(queueName);
- }else{
- appDepType.getQueue().setQueueName(queueName);
- }
- }
- } catch (NullPointerException e) {
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- int maxwallTime = computionResource.getWallTimeLimit();
- if(maxwallTime>0){
- appDepType.setMaxWallTime(maxwallTime);
- }
- } catch (NullPointerException e) {
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- }
-
- if (appDepType.getCpuCount() > 0) {
- RangeValueType rangeType = new RangeValueType();
- rangeType.setLowerBound(Double.NaN);
- rangeType.setUpperBound(Double.NaN);
- rangeType.setExact(appDepType.getCpuCount());
- JSDLUtils.setTotalCPUCountRequirements(value, rangeType);
- }
-
- if (appDepType.getProcessorsPerNode() > 0) {
- RangeValueType rangeType = new RangeValueType();
- rangeType.setLowerBound(Double.NaN);
- rangeType.setUpperBound(Double.NaN);
- rangeType.setExact(appDepType.getProcessorsPerNode());
- JSDLUtils.setIndividualCPUCountRequirements(value, rangeType);
- }
-
- if (appDepType.getNodeCount() > 0) {
- RangeValueType rangeType = new RangeValueType();
- rangeType.setLowerBound(Double.NaN);
- rangeType.setUpperBound(Double.NaN);
- rangeType.setExact(appDepType.getNodeCount());
- JSDLUtils.setTotalResourceCountRequirements(value, rangeType);
- }
-
- if(appDepType.getMaxWallTime() > 0) {
- RangeValueType cpuTime = new RangeValueType();
- cpuTime.setLowerBound(Double.NaN);
- cpuTime.setUpperBound(Double.NaN);
- long wallTime = appDepType.getMaxWallTime() * 60;
- cpuTime.setExact(wallTime);
- JSDLUtils.setIndividualCPUTimeRequirements(value, cpuTime);
- }
- }
-
-
- private static void createMemory(JobDefinitionType value, HpcApplicationDeploymentType appDepType){
- if (appDepType.getMinMemory() > 0 && appDepType.getMaxMemory() > 0) {
- RangeValueType rangeType = new RangeValueType();
- rangeType.setLowerBound(appDepType.getMinMemory());
- rangeType.setUpperBound(appDepType.getMaxMemory());
- JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
- }
-
- else if (appDepType.getMinMemory() > 0 && appDepType.getMaxMemory() <= 0) {
- // TODO set Wall time
- RangeValueType rangeType = new RangeValueType();
- rangeType.setLowerBound(appDepType.getMinMemory());
- JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
- }
-
- else if (appDepType.getMinMemory() <= 0 && appDepType.getMaxMemory() > 0) {
- // TODO set Wall time
- RangeValueType rangeType = new RangeValueType();
- rangeType.setUpperBound(appDepType.getMinMemory());
- JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
- }
-
- }
-
-
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDProcessor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDProcessor.java
deleted file mode 100644
index 06f4e75..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDProcessor.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-
/**
 * Placeholder for mapping SPMD (parallel application) settings from the job
 * execution context onto JSDL SPMD elements.
 */
public class SPMDProcessor {

    /**
     * Intentionally a no-op: SPMD element generation is not implemented here.
     * NOTE(review): the SPMD NumberOfProcesses element appears to be set by
     * ResourceProcessor.generateResourceElements instead - confirm before
     * adding logic here.
     */
    public static void generateSPMDElements(JobDefinitionType value, JobExecutionContext context) {

    }

}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDVariations.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDVariations.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDVariations.java
deleted file mode 100644
index ca37611..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/SPMDVariations.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
/**
 * SPMD variation URIs from the JSDL SPMD Application extension, each
 * identifying a particular MPI (or PVM/POE) implementation.
 */
public enum SPMDVariations {

    MPI("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MPI"),
    GridMPI("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/GridMPI"),
    IntelMPI("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/IntelMPI"),
    LAMMPI("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/LAM-MPI"),
    MPICH1("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MPICH1"),
    MPICH2("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MPICH2"),
    MPICHGM("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MPICH-GM"),
    MPICHMX("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MPICH-MX"),
    MVAPICH("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MVAPICH"),
    MVAPICH2("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/MVAPICH2"),
    OpenMPI("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/OpenMPI"),
    POE("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/POE"),
    PVM("http://www.ogf.org/jsdl/2007/02/jsdl-spmd/PVM");

    /** The variation URI as it appears in JSDL documents. */
    private final String variation;

    SPMDVariations(String variation) {
        this.variation = variation;
    }

    /** Returns the variation URI for this parallel-programming flavour. */
    public String value() {
        return variation;
    }
}
-
-
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/StorageCreator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/StorageCreator.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/StorageCreator.java
deleted file mode 100644
index cdccd4d..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/StorageCreator.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.util.Calendar;
-
-import javax.security.auth.x500.X500Principal;
-
-import org.oasisOpen.docs.wsrf.sg2.EntryType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.unigrids.services.atomic.types.PropertyType;
-import org.unigrids.x2006.x04.services.smf.CreateSMSDocument;
-import org.unigrids.x2006.x04.services.smf.StorageBackendParametersDocument.StorageBackendParameters;
-import org.unigrids.x2006.x04.services.smf.StorageDescriptionType;
-import org.w3.x2005.x08.addressing.EndpointReferenceType;
-
-import de.fzj.unicore.uas.StorageFactory;
-import de.fzj.unicore.uas.client.StorageClient;
-import de.fzj.unicore.uas.client.StorageFactoryClient;
-import de.fzj.unicore.wsrflite.xmlbeans.WSUtilities;
-import de.fzj.unicore.wsrflite.xmlbeans.client.RegistryClient;
-import de.fzj.unicore.wsrflite.xmlbeans.sg.Registry;
-
-
-import eu.unicore.util.httpclient.DefaultClientConfiguration;
-
-public class StorageCreator {
- protected final Logger log = LoggerFactory.getLogger(this.getClass());
-
- /**
- * the initial lifetime (in days) for newly created SMSs
- */
- private int initialLifeTime;
-
- /**
- * factory URL to use
- */
- private String factoryUrl;
-
- /**
- * site where to create the storage
- */
- private String siteName;
-
- /**
- * storage type to create
- */
- private String storageType;
-
- private DefaultClientConfiguration secProps;
-
- private String userName;
-
- public StorageCreator(DefaultClientConfiguration secProps, String besUrl, int initialLifetime, String storageType, String userName) {
- this.secProps = secProps;
- this.factoryUrl = getStorageFactoryUrl(besUrl);
- this.storageType = storageType;
- this.initialLifeTime = initialLifetime;
- this.userName = userName;
- }
-
-
- public StorageCreator(DefaultClientConfiguration secProps, String besUrl, int initialLifetime, String userName) {
- this.secProps = secProps;
- this.factoryUrl = getStorageFactoryUrl(besUrl);
- this.initialLifeTime = initialLifetime;
- this.userName = userName;
- }
-
-
- // The target site must have storage factory deployed with bes factory
- public StorageClient createStorage() throws Exception{
-
- if(factoryUrl == null) {
- throw new Exception("Cannot create Storage Factory Url");
- }
-
- EndpointReferenceType sfEpr= WSUtilities.makeServiceEPR(factoryUrl, StorageFactory.SMF_PORT);
-
- String dn = findServerName(factoryUrl, sfEpr);
-
- WSUtilities.addServerIdentity(sfEpr, dn);
-
- secProps.getETDSettings().setReceiver(new X500Principal(dn));
- secProps.getETDSettings().setIssuerCertificateChain(secProps.getCredential().getCertificateChain());
-
- // TODO: remove it afterwards
- if(userName != null) {
- secProps.getETDSettings().getRequestedUserAttributes2().put("xlogin", new String[]{userName});
- }
-
- StorageFactoryClient sfc = new StorageFactoryClient(sfEpr, secProps);
-
- if (log.isDebugEnabled()){
- log.debug("Using storage factory at <"+sfc.getUrl()+">");
- }
-
- StorageClient sc = null;
- try{
- sc=sfc.createSMS(getCreateSMSDocument());
-
- String addr=sc.getEPR().getAddress().getStringValue();
- log.info(addr);
-
- }catch(Exception ex){
- log.error("Could not create storage",ex);
- throw new Exception(ex);
- }
-
- return sc;
- }
-
- protected String findServerName(String besUrl, EndpointReferenceType smsEpr)throws Exception{
-
- int besIndex = besUrl.indexOf("StorageFactory?res");
- String ss = besUrl.substring(0, besIndex);
- ss = ss + "Registry";
-
- EndpointReferenceType eprt = WSUtilities.makeServiceEPR(ss, "default_registry", Registry.REGISTRY_PORT);
-
- RegistryClient registry = new RegistryClient(eprt, secProps);
-
- //first, check if server name is already in the EPR...
- String dn=WSUtilities.extractServerIDFromEPR(smsEpr);
- if(dn!=null){
- return dn;
- }
- //otherwise find a matching service in the registry
- String url=smsEpr.getAddress().getStringValue();
- if(url.contains("/services/"))url=url.substring(0,url.indexOf("/services"));
- if(log.isDebugEnabled()) log.debug("Checking for services at "+url);
- for(EntryType entry:registry.listEntries()){
- if(entry.getMemberServiceEPR().getAddress().getStringValue().startsWith(url)){
- dn=WSUtilities.extractServerIDFromEPR(entry.getMemberServiceEPR());
- if(dn!=null){
- return dn;
- }
- }
- }
- return null;
- }
-
-
- public static String getStorageFactoryUrl(String besUrl){
- int besIndex = besUrl.indexOf("BESFactory?res");
- String ss = besUrl.substring(0, besIndex);
- ss = ss + "StorageFactory?res=default_storage_factory";
- return ss;
- }
-
- /**
- * prepare request
- * */
- protected CreateSMSDocument getCreateSMSDocument(String ...keyValueParams){
- CreateSMSDocument in=CreateSMSDocument.Factory.newInstance();
- in.addNewCreateSMS();
- if(initialLifeTime>0){
- in.getCreateSMS().addNewTerminationTime().setCalendarValue(getTermTime());
- }
- if(storageType!=null){
- if(log.isDebugEnabled()) {
- log.debug("Will create storage of type : "+storageType);
- }
- StorageDescriptionType desc=in.getCreateSMS().addNewStorageDescription();
- desc.setStorageBackendType(storageType);
- if(keyValueParams.length>1){
- //other parameters from the cmdline as key=value
- StorageBackendParameters params=desc.addNewStorageBackendParameters();
- for(int i=1;i<keyValueParams.length;i++){
- String arg=keyValueParams[i];
- String[]sp=arg.split("=",2);
- PropertyType prop=params.addNewProperty();
- prop.setName(sp[0]);
- prop.setValue(sp[1]);
- if(log.isDebugEnabled()) {
- log.debug("Have parameter : "+arg);
- }
- }
- }
- }
- return in;
- }
-
- protected Calendar getTermTime(){
- Calendar c = Calendar.getInstance();
- c.add(Calendar.DATE, initialLifeTime);
- return c;
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/UASDataStagingProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/UASDataStagingProcessor.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/UASDataStagingProcessor.java
deleted file mode 100644
index 565f7d5..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/UASDataStagingProcessor.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import java.io.File;
-import java.util.Map;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-
-public class UASDataStagingProcessor {
-
- public static void generateDataStagingElements(JobDefinitionType value, JobExecutionContext context, String smsUrl) throws Exception{
-
- HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
- .getApplicationContext().getApplicationDeploymentDescription()
- .getType();
-
- smsUrl = "BFT:"+smsUrl;
-
- if (context.getInMessageContext().getParameters().size() > 0) {
- buildDataStagingFromInputContext(context, value, smsUrl, appDepType);
- }
- MessageContext outMessage = new MessageContext();
- ActualParameter a1 = new ActualParameter();
- a1.getType().changeType(StringParameterType.type);
- ((StringParameterType)a1.getType()).setValue("output/analysis-results.tar");
- outMessage.addParameter("o1", a1);
- context.setOutMessageContext(outMessage);
-
- if (context.getOutMessageContext().getParameters().size() > 0) {
- buildFromOutputContext(context, value, smsUrl, appDepType);
- }
- createStdOutURIs(value, appDepType, smsUrl, isUnicoreEndpoint(context));
- }
-
- private static void createInURISMSElement(JobDefinitionType value,
- String smsUrl, String inputDir, ActualParameter inParam)
- throws Exception {
-
- String uri = ((URIParameterType) inParam.getType()).getValue();
- //TODO: To add this input file name setting part of Airavata API
- String fileName = "input/" + new File(uri).getName();
- if (uri.startsWith("file")) {
- String fileUri = smsUrl+"#/"+fileName;
-
- JSDLUtils.addDataStagingSourceElement(value, fileUri, null, fileName);
- } else if (uri.startsWith("gsiftp") || uri.startsWith("http")
- || uri.startsWith("rns")) {
- // no need to stage-in those files to the input
- // directory because unicore site will fetch them for the user
- JSDLUtils.addDataStagingSourceElement(value, uri, null, fileName);
- }
-
- }
-
- private static void createStdOutURIs(JobDefinitionType value,
- HpcApplicationDeploymentType appDepType, String smsUrl,
- boolean isUnicore) throws Exception {
-
-
- String stdout = ApplicationProcessor.getApplicationStdOut(value, appDepType);
-
- String stderr = ApplicationProcessor.getApplicationStdErr(value, appDepType);
-
- String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
- : stdout;
- String stdoutURI = smsUrl+"#/output/"+stdoutFileName;
- JSDLUtils.addDataStagingTargetElement(value, null, stdoutFileName,
- stdoutURI);
-
- String stderrFileName = (stdout == null || stderr.equals("")) ? "stderr"
- : stderr;
- String stderrURI = smsUrl+"#/output/"+stderrFileName;
- JSDLUtils.addDataStagingTargetElement(value, null, stderrFileName,
- stderrURI);
-
- if(isUnicore) {
- String scriptExitCodeFName = "UNICORE_SCRIPT_EXIT_CODE";
- String scriptExitCode = smsUrl+"#/output/"+scriptExitCodeFName;
- JSDLUtils.addDataStagingTargetElement(value, null,
- scriptExitCodeFName, scriptExitCode.toString());
- }
-
- }
-
-
- private static void createOutStringElements(JobDefinitionType value,
- HpcApplicationDeploymentType appDeptype, String smsUrl, String prmValue) throws Exception {
-
- if(prmValue == null || "".equals(prmValue)) return;
-
- String finalSMSPath = smsUrl + "#/output/"+prmValue;
-
- JSDLUtils.addDataStagingTargetElement(value, null, prmValue, finalSMSPath);
- }
-
-
- private static void createOutURIElement(JobDefinitionType value,
- String prmValue) throws Exception {
- String fileName = new File(prmValue.toString()).getName();
- JSDLUtils.addDataStagingTargetElement(value, null, fileName, prmValue);
- }
-
-
- private static JobDefinitionType buildFromOutputContext(JobExecutionContext context,
- JobDefinitionType value, String smsUrl,
- HpcApplicationDeploymentType appDepType) throws Exception {
-
- Map<String, Object> outputParams = context.getOutMessageContext()
- .getParameters();
-
- for (String paramKey : outputParams.keySet()) {
-
- ActualParameter outParam = (ActualParameter) outputParams
- .get(paramKey);
-
- // if single urls then convert each url into jsdl source
- // elements,
- // that are formed by concat of gridftpurl+inputdir+filename
-
- String paramDataType = outParam.getType().getType().toString();
-
- if ("URI".equals(paramDataType)) {
- String uriPrm = ((URIParameterType) outParam.getType())
- .getValue();
- createOutURIElement(value, uriPrm);
- }
-
- // string params are converted into the job arguments
-
- else if (("URIArray").equals(paramDataType)) {
- String[] uriArray = ((URIArrayType) outParam.getType())
- .getValueArray();
- for (String u : uriArray) {
-
- createOutURIElement(value, u);
- }
-
- }
- else if ("String".equals(paramDataType)) {
- String stringPrm = ((StringParameterType) outParam
- .getType()).getValue();
- createOutStringElements(value, appDepType, smsUrl, stringPrm);
- }
-
- else if ("StringArray".equals(paramDataType)) {
- String[] valueArray = ((StringArrayType) outParam.getType())
- .getValueArray();
- for (String v : valueArray) {
- createOutStringElements(value, appDepType, smsUrl, v);
- }
- }
- }
-
- return value;
- }
-
-
- private static void buildDataStagingFromInputContext(JobExecutionContext context, JobDefinitionType value, String smsUrl, HpcApplicationDeploymentType appDepType)
- throws Exception {
-
- // TODO set data directory
- Map<String, Object> inputParams = context.getInMessageContext()
- .getParameters();
-
- for (String paramKey : inputParams.keySet()) {
-
- ActualParameter inParam = (ActualParameter) inputParams
- .get(paramKey);
-
- // if single urls then convert each url into jsdl source
- // elements,
- // that are formed by concat of gridftpurl+inputdir+filename
-
- String paramDataType = inParam.getType().getType().toString();
-
- if ("URI".equals(paramDataType)) {
- createInURISMSElement(value, smsUrl,
- appDepType.getInputDataDirectory(), inParam);
- }
-
- // string params are converted into the job arguments
-
- else if ("String".equals(paramDataType)) {
- String stringPrm = ((StringParameterType) inParam.getType())
- .getValue();
- ApplicationProcessor.addApplicationArgument(value, appDepType, stringPrm);
- }
- }
-
- }
-
- public static boolean isUnicoreEndpoint(JobExecutionContext context) {
- return ( (context.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType)?true:false );
- }
-
-}
[07/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/external/GridFtp.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
deleted file mode 100644
index 5acc406..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
+++ /dev/null
@@ -1,631 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.external;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.Vector;
-
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gfac.utils.GridConfigurationHandler;
-import org.apache.airavata.gfac.utils.GridFTPContactInfo;
-import org.globus.ftp.DataChannelAuthentication;
-import org.globus.ftp.DataSourceStream;
-import org.globus.ftp.FileInfo;
-import org.globus.ftp.GridFTPClient;
-import org.globus.ftp.HostPort;
-import org.globus.ftp.Marker;
-import org.globus.ftp.MarkerListener;
-import org.globus.ftp.MlsxEntry;
-import org.globus.ftp.Session;
-import org.globus.ftp.exception.ClientException;
-import org.globus.ftp.exception.ServerException;
-import org.globus.gsi.gssapi.auth.HostAuthorization;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * GridFTP tools
- */
-public class GridFtp {
- public static final Logger log = LoggerFactory.getLogger(GridFtp.class);
-
- public static final String GSIFTP_SCHEME = "gsiftp";
- public static final String HOST = "host";
-
- /**
- * Make directory at remote location
- *
- * @param destURI
- * @param gssCred
- * @throws ServerException
- * @throws IOException
- */
- public void makeDir(URI destURI, GSSCredential gssCred) throws ToolsException {
- GridFTPClient destClient = null;
- GridFTPContactInfo destHost = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
- try {
-
- String destPath = destURI.getPath();
- log.info(("Creating Directory = " + destHost + "=" + destPath));
-
- destClient = new GridFTPClient(destHost.hostName, destHost.port);
-
- int tryCount = 0;
- while (true) {
- try {
- destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- destClient.authenticate(gssCred);
- destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- makeDirExternalConfigurations(destClient, destPath);
-
- if (!destClient.exists(destPath)) {
- destClient.makeDir(destPath);
- }
- break;
- } catch (ServerException e) {
- tryCount++;
- if (tryCount >= 3) {
- throw new ToolsException(e.getMessage(), e);
- }
- Thread.sleep(10000);
- } catch (IOException e) {
- tryCount++;
- if (tryCount >= 3) {
- throw new ToolsException(e.getMessage(), e);
- }
- Thread.sleep(10000);
- }
- }
- } catch (ServerException e) {
- throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
- } catch (InterruptedException e) {
- throw new ToolsException("Internal Error cannot sleep", e);
- } finally {
- if (destClient != null) {
- try {
- destClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- /**
- * Upload file from stream
- *
- * @param destURI
- * @param gsCredential
- * @param io
- * @throws GFacException
- */
- public void uploadFile(URI destURI, GSSCredential gsCredential, InputStream io) throws ToolsException {
- GridFTPClient ftpClient = null;
- GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
-
- try {
-
- String remoteFile = destURI.getPath();
- log.info("The remote file is " + remoteFile);
-
- log.debug("Setup GridFTP Client");
-
- ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
- ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- ftpClient.authenticate(gsCredential);
- ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- makeFileTransferExternalConfigurations(null, ftpClient);
-
- log.info("Uploading file");
- if (checkBinaryExtensions(remoteFile)) {
- log.debug("Transfer mode is set to Binary for a file upload");
- ftpClient.setType(Session.TYPE_IMAGE);
- }
-
- ftpClient.put(remoteFile, new DataSourceStream(io), new MarkerListener() {
- public void markerArrived(Marker marker) {
- }
- });
-
- log.info("Upload file to:" + remoteFile + " is done");
-
- } catch (ServerException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } catch (ClientException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } finally {
- if (ftpClient != null) {
- try {
- ftpClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- public void uploadFile(URI srcURI, URI destURI, GSSCredential gsCredential) throws ToolsException {
- GridFTPClient srcClient = null;
- GridFTPContactInfo destContactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
- GridFTPContactInfo srcContactInfo = new GridFTPContactInfo(srcURI.getHost(),srcURI.getPort());
- try {
- String remoteFile = destURI.getPath();
- log.info("The remote file is " + remoteFile);
- log.debug("Setup GridFTP Client");
- srcClient = new GridFTPClient(srcContactInfo.hostName, srcContactInfo.port);
- srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- srcClient.authenticate(gsCredential);
- srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
-
- GridFTPClient destClient = new GridFTPClient(destContactInfo.hostName, destContactInfo.port);
- destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- destClient.authenticate(gsCredential);
- destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- makeFileTransferExternalConfigurations(srcClient, destClient);
- log.debug("Uploading file");
- if (checkBinaryExtensions(remoteFile)) {
- log.debug("Transfer mode is set to Binary for a file upload");
- srcClient.setType(Session.TYPE_IMAGE);
- }
-
- srcClient.transfer(srcURI.getPath(),destClient, remoteFile, false, null);
-
- log.info("Upload file to:" + remoteFile + " is done");
-
- } catch (ServerException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
- } catch (ClientException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
- } finally {
- if (srcClient != null) {
- try {
- srcClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- /**
- * Upload file to remote location
- *
- * @param destURI
- * @param gsCredential
- * @param localFile
- * @throws GFacException
- */
- public void uploadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
- GridFTPClient ftpClient = null;
- GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
- try {
-
- String remoteFile = destURI.getPath();
-
- log.info("The local temp file is " + localFile);
- log.info("the remote file is " + remoteFile);
-
- log.debug("Setup GridFTP Client");
-
- ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
- ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- ftpClient.authenticate(gsCredential);
- ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- makeFileTransferExternalConfigurations(null, ftpClient);
-
- log.debug("Uploading file");
- if (checkBinaryExtensions(remoteFile)) {
- log.debug("Transfer mode is set to Binary for a file upload");
- ftpClient.setType(Session.TYPE_IMAGE);
- }
-
-
- ftpClient.put(localFile, remoteFile, false);
-
- log.info("Upload file to:" + remoteFile + " is done");
-
- } catch (ServerException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } catch (ClientException e) {
- throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
- } finally {
- if (ftpClient != null) {
- try {
- ftpClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- /**
- * Download File from remote location
- *
- * @param destURI
- * @param gsCredential
- * @param localFile
- * @throws GFacException
- */
- public void downloadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
- GridFTPClient ftpClient = null;
- GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
- try {
- String remoteFile = destURI.getPath();
-
- log.info("The local temp file is " + localFile);
- log.info("the remote file is " + remoteFile);
-
- log.debug("Setup GridFTP Client");
-
- ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
- ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- ftpClient.authenticate(gsCredential);
- ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- makeFileTransferExternalConfigurations(ftpClient, null);
-
- log.debug("Downloading file");
- if (checkBinaryExtensions(remoteFile)) {
- log.debug("Transfer mode is set to Binary to download a file");
- ftpClient.setType(Session.TYPE_IMAGE);
- }
-
- ftpClient.get(remoteFile, localFile);
-
- log.info("Download file to:" + localFile + " is done");
-
- } catch (ServerException e) {
- throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
- } catch (ClientException e) {
- throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
- } finally {
- if (ftpClient != null) {
- try {
- //ftpClient.close();
- ftpClient.close(false);
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- /**
- * Stream remote file
- *
- * @param destURI
- * @param gsCredential
- * @param localFile
- * @return
- * @throws GFacException
- */
- public String readRemoteFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
- BufferedReader instream = null;
- File localTempfile = null;
- try {
-
- if (localFile == null) {
- localTempfile = File.createTempFile("stderr", "err");
- } else {
- localTempfile = localFile;
- }
-
- log.info("Local temporary file:" + localTempfile);
-
- downloadFile(destURI, gsCredential, localTempfile);
-
- instream = new BufferedReader(new FileReader(localTempfile));
- StringBuffer buff = new StringBuffer();
- String temp = null;
- while ((temp = instream.readLine()) != null) {
- buff.append(temp);
- buff.append(Constants.NEWLINE);
- }
-
- log.info("finish read file:" + localTempfile);
-
- return buff.toString();
- } catch (FileNotFoundException e) {
- throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
- } catch (IOException e) {
- throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
- } finally {
- if (instream != null) {
- try {
- instream.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection",e);
- }
- }
- }
- }
-
- /**
- * Transfer data from one GridFTp Endpoint to another GridFTP Endpoint
- *
- * @param srchost
- * @param desthost
- * @param gssCred
- * @param srcActive
- * @throws ServerException
- * @throws ClientException
- * @throws IOException
- */
- public void transfer(URI srchost, URI desthost, GSSCredential gssCred, boolean srcActive) throws ToolsException {
- GridFTPClient destClient = null;
- GridFTPClient srcClient = null;
-
- try {
- destClient = new GridFTPClient(desthost.getHost(), desthost.getPort());
- destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- destClient.authenticate(gssCred);
-
- if (checkBinaryExtensions(desthost.getPath())) {
- log.debug("Transfer mode is set to Binary");
- destClient.setType(Session.TYPE_IMAGE);
- }
-
- srcClient = new GridFTPClient(srchost.getHost(), srchost.getPort());
- srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- srcClient.authenticate(gssCred);
- makeFileTransferExternalConfigurations(srcClient, destClient);
-
- if (checkBinaryExtensions(srchost.getPath())) {
- log.debug("Transfer mode is set to Binary");
- srcClient.setType(Session.TYPE_IMAGE);
- }
-
- if (srcActive) {
- log.debug("Set src active");
- HostPort hp = destClient.setPassive();
- srcClient.setActive(hp);
- } else {
- log.debug("Set dst active");
- HostPort hp = srcClient.setPassive();
- destClient.setActive(hp);
- }
-
- log.debug("Start transfer file from GridFTP:" + srchost.toString() + " to " + desthost.toString());
-
- /**
- * Transfer a file. The transfer() function blocks until the transfer is complete.
- */
- srcClient.transfer(srchost.getPath(), destClient, desthost.getPath(), false, null);
- if (srcClient.getSize(srchost.getPath()) == destClient.getSize(desthost.getPath())) {
- log.debug("CHECK SUM OK");
- } else {
- log.debug("****CHECK SUM FAILED****");
- }
-
- } catch (ServerException e) {
- throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
- + desthost.toString(), e);
- } catch (IOException e) {
- throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
- + desthost.toString(), e);
- } catch (ClientException e) {
- throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
- + desthost.toString(), e);
- } finally {
- if (destClient != null) {
- try {
- destClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection at Desitnation:" + desthost.toString());
- }
- }
- if (srcClient != null) {
- try {
- srcClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection at Source:" + srchost.toString(),e);
- }
- }
- }
- }
-
- /**
- * List files in a GridFTP directory
- * @param dirURI
- * @param gssCred
- * @return
- * @throws ToolsException
- */
- @SuppressWarnings("unchecked")
- public List<String> listDir(URI dirURI, GSSCredential gssCred) throws ToolsException {
- List<String> files = new ArrayList<String>();
- GridFTPClient srcClient = null;
- try {
- GridFTPContactInfo contactInfo = new GridFTPContactInfo(dirURI.getHost(), dirURI.getPort());
-
- srcClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
- srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
- srcClient.authenticate(gssCred);
- srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
- srcClient.setType(Session.TYPE_ASCII);
- srcClient.changeDir(dirURI.getPath());
- makelistDirExternalConfigurations(srcClient, srcClient.getCurrentDir());
-
- Vector<Object> fileInfo = null;
- try {
- fileInfo = srcClient.mlsd();
- } catch (Throwable e) {
- fileInfo = srcClient.list();
- }
-
- if (!fileInfo.isEmpty()) {
- for (int j = 0; j < fileInfo.size(); ++j) {
- String name = null;
- if (fileInfo.get(j) instanceof MlsxEntry) {
- name = ((MlsxEntry) fileInfo.get(j)).getFileName();
- } else if (fileInfo.get(j) instanceof FileInfo) {
- name = ((FileInfo) fileInfo.get(j)).getName();
- } else {
- throw new ToolsException("Unsupported type returned by gridftp " + fileInfo.get(j));
- }
-
- if (!name.equals(".") && !name.equals("..")) {
- URI uri = GFacUtils.createGsiftpURI(contactInfo.hostName, dirURI.getPath() + File.separator + name);
- files.add(uri.getPath());
- }
- }
- }
- return files;
- } catch (IOException e) {
- throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
- } catch (ServerException e) {
- throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
- } catch (ClientException e) {
- throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
- } catch (URISyntaxException e) {
- throw new ToolsException("Error creating URL of listed files: " + dirURI.toString() ,e);
- } finally {
- if (srcClient != null) {
- try {
- srcClient.close();
- } catch (Exception e) {
- log.warn("Cannot close GridFTP client connection", e);
- }
- }
- }
- }
- /**
- * Method to check file extension as binary to set transfer type
- * @param filePath
- * @return
- */
- private static boolean checkBinaryExtensions(String filePath){
- String extension = filePath.substring(filePath.lastIndexOf(".")+1,filePath.length());
- Set<String> extensions = new HashSet<String>(Arrays.asList(new String[] {"tar","zip","gz","tgz"}));
- if(extensions.contains(extension)){
- return true;
- }else{
- return false;
- }
-
- }
-
- /**
- * This function will call the external configuration handlers to configure the GridFTPClients
- * when file transferring is required
- * object.
- * @param client
- * @param source
- */
- private void makeFileTransferExternalConfigurations(GridFTPClient source, GridFTPClient destination) {
- GridConfigurationHandler[] handlers = GFacConfiguration.getGridConfigurationHandlers();
- for(GridConfigurationHandler handler:handlers){
- try {
- handler.handleFileTransferFTPClientConfigurations(source,destination);
- } catch (Exception e) {
- //TODO Right now we are just catching & ignoring the exception. But later on we need
- //to throw this exception to notify the user of configuration errors of their
- //custom configuration handlers.
- log.error("Error while external configurings for GridFTPClient(s) of "
- + (source!=null? " "+source.getHost():"")+" "+(destination!=null? " "+destination.getHost():""), e);
- }
- }
-
- }
-
- /**
- * This function will call the external configuration handlers to configure the GridFTPClients
- * when file transferring is required
- * object.
- * @param client
- * @param source
- */
- private void makeDirExternalConfigurations(GridFTPClient client, String dirPath) {
- GridConfigurationHandler[] handlers = GFacConfiguration.getGridConfigurationHandlers();
- for(GridConfigurationHandler handler:handlers){
- try {
- handler.handleMakeDirFTPClientConfigurations(client, dirPath);
- } catch (Exception e) {
- //TODO Right now we are just catching & ignoring the exception. But later on we need
- //to throw this exception to notify the user of configuration errors of their
- //custom configuration handlers.
- log.error("Error while external configurings for GridFTPClient(s) of " + client.getHost(), e);
- }
- }
-
- }
-
- /**
- * This function will call the external configuration handlers to configure the GridFTPClients
- * when file transferring is required
- * object.
- * @param client
- * @param source
- */
- private void makelistDirExternalConfigurations(GridFTPClient client, String dirPath) {
- GridConfigurationHandler[] handlers = GFacConfiguration.getGridConfigurationHandlers();
- for(GridConfigurationHandler handler:handlers){
- try {
- handler.handleListDirFTPClientConfigurations(client, dirPath);
- } catch (Exception e) {
- //TODO Right now we are just catching & ignoring the exception. But later on we need
- //to throw this exception to notify the user of configuration errors of their
- //custom configuration handlers.
- log.error("Error while external configurings for GridFTPClient(s) of " + client.getHost(), e);
- }
- }
-
- }
-
- public String gridFTPFileExist(URI inputDirectory,String fileName,GSSCredential gssCred) throws ToolsException {
- List<String> strings = listDir(inputDirectory, gssCred);
- for(String fileExist:strings){
- if(fileName.equals(fileExist)) {
- fileName = "duplicate_" + fileName;
- return fileName;
- }
- }
- return fileName;
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
deleted file mode 100644
index 83af422..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.handler;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Map;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.external.GridFtp;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gfac.utils.GramJobSubmissionListener;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramDirectorySetupHandler extends AbstractHandler {
- private static final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
- public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
- log.info("Invoking GramDirectorySetupHandler ...");
- super.invoke(jobExecutionContext);
- String[] gridFTPEndpointArray = null;
-
- //TODO: why it is tightly coupled with gridftp
-// GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
- //TODO: make it more reusable
- HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
-
-
- if(hostType instanceof GlobusHostType){
- gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
- }
- else if (hostType instanceof UnicoreHostType){
- gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
- }
-
-
-
- ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
- ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
- GridFtp ftp = new GridFtp();
-
- try {
-
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
- getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
- if (gridFTPEndpointArray == null || gridFTPEndpointArray.length == 0) {
- gridFTPEndpointArray = new String[]{hostType.getHostAddress()};
- }
- boolean success = false;
- GFacHandlerException pe = null;// = new ProviderException("");
- for (String endpoint : gridFTPEndpointArray) {
- try {
-
- URI tmpdirURI = GFacUtils.createGsiftpURI(endpoint, app.getScratchWorkingDirectory());
- URI workingDirURI = GFacUtils.createGsiftpURI(endpoint, app.getStaticWorkingDirectory());
- URI inputURI = GFacUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
- URI outputURI = GFacUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
-
- log.info("Host FTP = " + gridFTPEndpointArray[0]);
- log.info("temp directory = " + tmpdirURI);
- log.info("Working directory = " + workingDirURI);
- log.info("Input directory = " + inputURI);
- log.info("Output directory = " + outputURI);
- ftp.makeDir(tmpdirURI, gssCred);
- ftp.makeDir(workingDirURI, gssCred);
- ftp.makeDir(inputURI, gssCred);
- ftp.makeDir(outputURI, gssCred);
- success = true;
- DataTransferDetails detail = new DataTransferDetails();
- TransferStatus status = new TransferStatus();
- status.setTransferState(TransferState.DIRECTORY_SETUP);
- detail.setTransferStatus(status);
- detail.setTransferDescription("Working directory = " + workingDirURI);
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-
- break;
- } catch (URISyntaxException e) {
- pe = new GFacHandlerException("URI is malformatted:" + e.getMessage(), e);
-
- } catch (Exception e) {
- pe = new GFacHandlerException(e.getMessage(), e);
- }
- }
- if (success == false) {
- GFacUtils.saveErrorDetails(pe.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
- throw pe;
- }
- } catch (SecurityException e) {
- throw new GFacHandlerException(e.getMessage(), e);
- } catch (ApplicationSettingsException e1) {
- throw new GFacHandlerException(e1.getMessage(), e1);
- }
- }
-
- public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
deleted file mode 100644
index 6876d59..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.handler;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.external.GridFtp;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GridFTPInputHandler extends AbstractHandler {
- private static final Logger log = LoggerFactory.getLogger(AppDescriptorCheckHandler.class);
-
- public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
- log.info("Invoking GridFTPInputHandler ...");
- super.invoke(jobExecutionContext);
- DataTransferDetails detail = new DataTransferDetails();
- TransferStatus status = new TransferStatus();
-
- MessageContext inputNew = new MessageContext();
- try {
- MessageContext input = jobExecutionContext.getInMessageContext();
- Set<String> parameters = input.getParameters().keySet();
- for (String paramName : parameters) {
- ActualParameter actualParameter = (ActualParameter) input.getParameters().get(paramName);
- String paramValue = MappingFactory.toString(actualParameter);
- //TODO: Review this with type
- if ("URI".equals(actualParameter.getType().getType().toString())) {
- ((URIParameterType) actualParameter.getType()).setValue(stageInputFiles(jobExecutionContext, paramValue));
- } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
- List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
- List<String> newFiles = new ArrayList<String>();
- for (String paramValueEach : split) {
- String stageInputFiles = stageInputFiles(jobExecutionContext, paramValueEach);
- detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
- status.setTransferState(TransferState.UPLOAD);
- detail.setTransferStatus(status);
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-
- newFiles.add(stageInputFiles);
- }
- ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
- }
- inputNew.getParameters().put(paramName, actualParameter);
-
- }
- } catch (Exception e) {
- try {
- status.setTransferState(TransferState.FAILED);
- detail.setTransferStatus(status);
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
- } catch (Exception e1) {
- throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
- }
- log.error(e.getMessage());
- throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
- }
- jobExecutionContext.setInMessageContext(inputNew);
- }
-
- private static String stageInputFiles(JobExecutionContext jobExecutionContext, String paramValue) throws URISyntaxException, SecurityException, ToolsException, IOException,GFacException, ApplicationSettingsException {
- URI gridftpURL = new URI(paramValue);
-
- String[] gridFTPEndpointArray = null;
-
- // not to download input files to the input dir if its http / gsiftp
- // but if local then yes
- boolean isInputNonLocal = true;
-
- //TODO: why it is tightly coupled with gridftp
-// GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
- //TODO: make it more reusable
- HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-
- if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
- gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
- }
- else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
- gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
- isInputNonLocal = false;
- }
- else {
- //TODO
- }
-
-
- ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
- GridFtp ftp = new GridFtp();
- URI destURI = null;
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
-
- for (String endpoint : gridFTPEndpointArray) {
- URI inputURI = GFacUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
- String fileName = new File(gridftpURL.getPath()).getName();
- fileName = ftp.gridFTPFileExist(inputURI, fileName,gssCred);
-
- String destLocalPath = inputURI.getPath() + File.separator + fileName;
- //if user give a url just to refer an endpoint, not a web resource we are not doing any transfer
- if (fileName != null && !"".equals(fileName)) {
- destURI = GFacUtils.createGsiftpURI(endpoint, destLocalPath);
- if (paramValue.startsWith("gsiftp")) {
- // no need to do if it is unicore, as unicore will download this on user's behalf to the job space dir
- if(isInputNonLocal) ftp.uploadFile(gridftpURL, destURI, gssCred);
- else return paramValue;
- } else if (paramValue.startsWith("file")) {
- String localFile = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
- FileInputStream fis = null;
- try {
- fis = new FileInputStream(localFile);
- ftp.uploadFile(destURI, gssCred, fis);
- } catch (IOException e) {
- throw new GFacException("Unable to create file : " + localFile ,e);
- } finally {
- if (fis != null) {
- fis.close();
- }
- }
- } else if (paramValue.startsWith("http")) {
- // no need to do if it is unicore
- if(isInputNonLocal) {
- InputStream is = null;
- try {
- is = gridftpURL.toURL().openStream();
- ftp.uploadFile(destURI, gssCred, (is));
- }finally {
- is.close();
- }
- } else {
- // don't return destUri
- return paramValue;
- }
-
- } else {
- //todo throw exception telling unsupported protocol
- return paramValue;
- }
- } else {
- // When the given input is not a web resource but a URI type input, then we don't do any transfer just keep the same value as it isin the input
- return paramValue;
- }
- }
- return destURI.getPath();
- }
-
- public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
deleted file mode 100644
index 247ef1f..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.handler;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.external.GridFtp;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gfac.utils.OutputUtils;
-import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.model.workspace.experiment.TransferState;
-import org.apache.airavata.model.workspace.experiment.TransferStatus;
-import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.registry.cpi.Registry;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class GridFTPOutputHandler extends AbstractHandler {
- private static final Logger log = LoggerFactory.getLogger(GridFTPOutputHandler.class);
- private Registry registry;
-
-
- public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
- log.info("Invoking GridFTPOutputHandler ...");
- super.invoke(jobExecutionContext);
-
- ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-
- HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
- String[] gridFTPEndpointArray = null;
- String hostName = null;
-
- if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
- gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
- hostName = ((GlobusHostType) hostType).getHostName();
-
- }
- else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
- gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
- hostName = ((UnicoreHostType) hostType).getHostName();
- }
- else {
- //TODO
- }
-
- GridFtp ftp = new GridFtp();
- File localStdErrFile = null;
- Map<String, ActualParameter> stringMap = new HashMap<String, ActualParameter>();
- DataTransferDetails detail = new DataTransferDetails();
- TransferStatus status = new TransferStatus();
-
- try {
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
- String[] hostgridFTP = gridFTPEndpointArray;
- if (hostgridFTP == null || hostgridFTP.length == 0) {
- hostgridFTP = new String[]{hostName};
- }
- for (String endpoint : gridFTPEndpointArray) {
- try {
- /*
- * Read Stdout and Stderror
- */
- URI stdoutURI = GFacUtils.createGsiftpURI(endpoint, app.getStandardOutput());
- URI stderrURI = GFacUtils.createGsiftpURI(endpoint, app.getStandardError());
- status.setTransferState(TransferState.COMPLETE);
- detail.setTransferStatus(status);
- detail.setTransferDescription("STDOUT:" + stdoutURI.toString());
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
- status.setTransferState(TransferState.COMPLETE);
- detail.setTransferStatus(status);
- detail.setTransferDescription("STDERR:" + stderrURI.toString());
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-
- log.info("STDOUT:" + stdoutURI.toString());
- log.info("STDERR:" + stderrURI.toString());
-
- File logDir = new File("./service_logs");
- if (!logDir.exists()) {
- logDir.mkdir();
- }
-
- String timeStampedServiceName = GFacUtils.createUniqueNameForService(jobExecutionContext
- .getServiceName());
- File localStdOutFile = File.createTempFile(timeStampedServiceName, "stdout");
- localStdErrFile = File.createTempFile(timeStampedServiceName, "stderr");
-
-
- String stdout = null;
- String stderr = null;
-
- // TODO: what if job is failed
- // and this handler is not able to find std* files?
- try {
- stdout = ftp.readRemoteFile(stdoutURI, gssCred, localStdOutFile);
- stderr = ftp.readRemoteFile(stderrURI, gssCred, localStdErrFile);
- //TODO: do we also need to set them as output parameters for another job
- ApplicationDescription application = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
- ApplicationDeploymentDescriptionType appDesc = application.getType();
- appDesc.setStandardOutput(stdout);
- appDesc.setStandardError(stderr);
- jobExecutionContext.getApplicationContext().setApplicationDeploymentDescription(application);
- }
- catch(ToolsException e) {
- log.error("Cannot download stdout/err files. One reason could be the job is not successfully finished: "+e.getMessage());
- }
-
-
- Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
- Set<String> keys = output.keySet();
- for (String paramName : keys) {
- ActualParameter actualParameter = (ActualParameter) output.get(paramName);
- if ("URIArray".equals(actualParameter.getType().getType().toString())) {
- URI outputURI = GFacUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
- List<String> outputList = ftp.listDir(outputURI, gssCred);
- String[] valueList = outputList.toArray(new String[outputList.size()]);
- ((URIArrayType) actualParameter.getType()).setValueArray(valueList);
- // why to instantiate new instance?
-// stringMap = new HashMap<String, ActualParameter>();
- stringMap.put(paramName, actualParameter);
- }else if ("StringArray".equals(actualParameter.getType().getType().toString())) {
- String[] valueList = OutputUtils.parseStdoutArray(stdout, paramName);
- ((StringArrayType) actualParameter.getType()).setValueArray(valueList);
-// stringMap = new HashMap<String, ActualParameter>();
- stringMap.put(paramName, actualParameter);
- } else if ("URI".equals(actualParameter.getType().getType().toString())) {
- URI outputURI = GFacUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
- List<String> outputList = ftp.listDir(outputURI, gssCred);
- if (outputList.size() == 0 || outputList.get(0).isEmpty()) {
- stringMap = OutputUtils.fillOutputFromStdout(output, stdout, stderr);
- } else {
- String valueList = outputList.get(0);
- ((URIParameterType) actualParameter.getType()).setValue(valueList);
- stringMap = new HashMap<String, ActualParameter>();
- stringMap.put(paramName, actualParameter);
- }
- }
- else {
- // This is to handle exception during the output parsing.
- stringMap = OutputUtils.fillOutputFromStdout(output, stdout, stderr);
- }
- status.setTransferState(TransferState.DOWNLOAD);
- detail.setTransferStatus(status);
- detail.setTransferDescription("Output: " + stringMap.get(paramName).toString());
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
-
- }
- if (stringMap == null || stringMap.isEmpty()) {
- throw new GFacHandlerException("Empty Output returned from the Application, Double check the application" +
- "and ApplicationDescriptor output Parameter Names");
- }
- // If users has given an output Data path to download the output files this will download the file on machine where GFac is installed
- TaskDetails taskData = jobExecutionContext.getTaskData();
- if(taskData != null && taskData.getAdvancedOutputDataHandling() != null){
- String outputDataDirectory = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
- if(outputDataDirectory != null && !"".equals(outputDataDirectory)){
- stageOutputFiles(jobExecutionContext,outputDataDirectory);
- }
- }
- } catch (ToolsException e) {
- log.error(e.getMessage());
- throw new GFacHandlerException(e.getMessage() + "\n StdError Data: \n" +readLastLinesofStdOut(localStdErrFile.getPath(), 20),e);
- } catch (URISyntaxException e) {
- log.error(e.getMessage());
- throw new GFacHandlerException("URI is malformatted:" + e.getMessage(), e, readLastLinesofStdOut(localStdErrFile.getPath(), 20));
- }
- }
- } catch (Exception e) {
- try {
- status.setTransferState(TransferState.FAILED);
- detail.setTransferStatus(status);
- registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
- GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
- } catch (Exception e1) {
- throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
- }
- log.error(e.getMessage());
- throw new GFacHandlerException(e.getMessage(), e, readLastLinesofStdOut(localStdErrFile.getPath(), 20));
- }
-
- }
-
- private static String readLastLinesofStdOut(String path, int count) {
- StringBuffer buffer = new StringBuffer();
- FileInputStream in = null;
- try {
- in = new FileInputStream(path);
- } catch (FileNotFoundException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
- BufferedReader br = new BufferedReader(new InputStreamReader(in));
- List<String> strLine = new ArrayList<String>();
- String tmp = null;
- int numberofLines = 0;
- try {
- while ((tmp = br.readLine()) != null) {
- strLine.add(tmp);
- numberofLines++;
- }
- } catch (IOException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
- if (numberofLines > count) {
- for (int i = numberofLines - count; i < numberofLines; i++) {
- buffer.append(strLine.get(i));
- buffer.append("\n");
- }
- } else {
- for (int i = 0; i < numberofLines; i++) {
- buffer.append(strLine.get(i));
- buffer.append("\n");
- }
- }
- try {
- in.close();
- } catch (IOException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
- return buffer.toString();
- }
-
- private static void stageOutputFiles(JobExecutionContext jobExecutionContext, String outputFileStagingPath) throws GFacProviderException,GFacException, ApplicationSettingsException {
-
-
- HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
- String[] gridFTPEndpointArray = null;
-
- if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
- gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
- }
- else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
- gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
- }
- else {
- //TODO
- }
-
-
- MessageContext outputNew = new MessageContext();
- MessageContext output = jobExecutionContext.getOutMessageContext();
- Map<String, Object> parameters = output.getParameters();
- for (String paramName : parameters.keySet()) {
- ActualParameter actualParameter = (ActualParameter) parameters
- .get(paramName);
-
- GridFtp ftp = new GridFtp();
- GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
- try {
- if ("URI".equals(actualParameter.getType().getType().toString())) {
- for (String endpoint : gridFTPEndpointArray) {
- ((URIParameterType) actualParameter.getType()).setValue(doStaging(outputFileStagingPath,
- MappingFactory.toString(actualParameter), ftp, gssCred, endpoint));
- }
- } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
- List<String> split = Arrays.asList(StringUtil.getElementsFromString(MappingFactory.toString(actualParameter)));
- List<String> newFiles = new ArrayList<String>();
- for (String endpoint : gridFTPEndpointArray) {
- for (String paramValueEach : split) {
- newFiles.add(doStaging(outputFileStagingPath, paramValueEach, ftp, gssCred, endpoint));
- }
- ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
- }
-
- }
- } catch (URISyntaxException e) {
- log.error(e.getMessage());
- throw new GFacProviderException(e.getMessage(), e);
- } catch (ToolsException e) {
- log.error(e.getMessage());
- throw new GFacProviderException(e.getMessage(), e);
- }
- outputNew.getParameters().put(paramName, actualParameter);
- }
- jobExecutionContext.setOutMessageContext(outputNew);
- }
-
- private static String doStaging(String outputFileStagingPath, String paramValue, GridFtp ftp, GSSCredential gssCred, String endpoint) throws URISyntaxException, ToolsException {
- URI srcURI = GFacUtils.createGsiftpURI(endpoint, paramValue);
- String fileName = new File(srcURI.getPath()).getName();
- File outputpath = new File(outputFileStagingPath);
- if(!outputpath.exists()){
- outputpath.mkdirs();
- }
- File outputFile = new File(outputpath.getAbsolutePath() + File.separator + fileName);
- ftp.readRemoteFile(srcURI,
- gssCred, outputFile);
- return outputFileStagingPath + File.separator + fileName;
- }
-
- public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
-
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
deleted file mode 100644
index 26ea689..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.persistence;
-
-import org.apache.airavata.common.utils.DBUtil;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.log4j.Logger;
-import org.globus.gram.internal.GRAMConstants;
-
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 6/18/13
- * Time: 4:16 PM
- * Database based job persistence manager. Current default implementation.
- */
-
-public class DBJobPersistenceManager implements JobPersistenceManager {
-
- private DBUtil dbUtil;
-
- private static final Logger log = Logger.getLogger(DBJobPersistenceManager.class);
-
-
- public DBJobPersistenceManager(DBUtil db) {
- this.dbUtil = db;
- }
-
- public synchronized void updateJobStatus(JobData jobData) throws GFacException {
-
- if (jobData.getState() == GRAMConstants.STATUS_UNSUBMITTED) {
- insertJob(jobData);
- } else {
-
- String sql = "update gram_job set status = ? where job_id = ?";
-
- Connection connection = null;
- PreparedStatement stmt = null;
-
- try {
- connection = getConnection();
- stmt = connection.prepareStatement(sql);
- stmt.setInt(1, jobData.getState());
- stmt.setString(2, jobData.getJobId());
-
- stmt.executeUpdate();
- connection.commit();
-
- } catch (SQLException e) {
- throw new GFacException(e);
- } finally {
- try {
- if (stmt != null) {
- stmt.close();
- }
-
- if (connection != null) {
- connection.close();
- }
-
- } catch (SQLException e) {
- log.error("Error closing streams", e);
- }
- }
- }
- }
-
- private void insertJob(JobData jobData) throws GFacException {
-
- String sql = "insert into gram_job values (?, ?)";
-
- PreparedStatement stmt = null;
- Connection connection = null;
-
- try {
- connection = getConnection();
- stmt = connection.prepareStatement(sql);
- stmt.setString(1, jobData.getJobId());
- stmt.setInt(2, jobData.getState());
-
- stmt.executeUpdate();
- } catch (SQLException e) {
- throw new GFacException(e);
- } finally {
- try {
- if (stmt != null) {
- stmt.close();
- }
-
- if (connection != null) {
- connection.close();
- }
-
- } catch (SQLException e) {
- log.error("Error closing streams", e);
- }
- }
-
- }
-
- public List<JobData> getRunningJobs() throws GFacException {
-
- String sql = "select * from gram_job where status not in (?, ?, ?)";
-
- int[] statuses = new int[3];
- statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
- statuses[1] = GRAMConstants.STATUS_DONE;
- statuses[2] = GRAMConstants.STATUS_FAILED;
-
- return getJobs(sql, statuses);
- }
-
- public List<JobData> getFailedJobs() throws GFacException {
-
- String sql = "select * from gram_job where status in (?)";
-
- int[] statuses = new int[1];
- statuses[0] = GRAMConstants.STATUS_FAILED;
-
- return getJobs(sql, statuses);
- }
-
- public List<JobData> getUnSubmittedJobs() throws GFacException {
-
- String sql = "select * from gram_job where status in (?)";
-
- int[] statuses = new int[1];
- statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
-
- return getJobs(sql, statuses);
- }
-
- public List<JobData> getSuccessfullyCompletedJobs() throws GFacException {
-
- String sql = "select * from gram_job where status in (?)";
-
- int[] statuses = new int[1];
- statuses[0] = GRAMConstants.STATUS_DONE;
-
- return getJobs(sql, statuses);
-
- }
-
-
- protected List<JobData> getJobs(String sql, int[] statuses) throws GFacException {
-
- List<JobData> jobs = new ArrayList<JobData>();
-
- PreparedStatement preparedStatement = null;
- Connection connection = null;
-
- try {
- connection = getConnection();
- preparedStatement = connection.prepareStatement(sql);
-
- int index = 1;
- for (int status : statuses) {
- preparedStatement.setInt(index, status);
- ++index;
- }
-
- ResultSet resultSet = preparedStatement.executeQuery();
-
- while (resultSet.next()) {
-
- String jobId = resultSet.getString("job_id");
- int state = resultSet.getInt("status");
-
- jobs.add(new JobData(jobId, state));
- }
-
- } catch (SQLException e) {
- throw new GFacException(e);
- } finally {
- try {
- if (preparedStatement != null) {
- preparedStatement.close();
- }
-
- if (connection != null) {
- connection.close();
- }
-
- } catch (SQLException e) {
- log.error("Error closing connection", e);
- }
- }
-
- return jobs;
- }
-
- private synchronized Connection getConnection() throws SQLException {
- Connection connection = dbUtil.getConnection();
- connection.setAutoCommit(true);
-
- return connection;
- }
-}
[11/11] git commit: Merge branch 'master' of
https://git-wip-us.apache.org/repos/asf/airavata
Posted by la...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/airavata
Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/7be9daea
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/7be9daea
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/7be9daea
Branch: refs/heads/master
Commit: 7be9daea61cb90f29127b80c4ed2aaa33b3b034d
Parents: 13b505a 053ce56
Author: lahiru <la...@apache.org>
Authored: Thu Apr 24 16:51:41 2014 -0400
Committer: lahiru <la...@apache.org>
Committed: Thu Apr 24 16:51:41 2014 -0400
----------------------------------------------------------------------
modules/integration-tests/pom.xml | 2 +-
.../airavata/integration/DataRetrievalIT.java | 18 ++++++++++++------
2 files changed, 13 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
[10/11] git commit: creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
creating gfac-bes and gfac-gram out from gfac-core
Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/13b505ae
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/13b505ae
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/13b505ae
Branch: refs/heads/master
Commit: 13b505aee64d7d91319038b6f6baa410f8aaff9d
Parents: 8c6a9a5
Author: lahiru <la...@apache.org>
Authored: Thu Apr 24 16:51:18 2014 -0400
Committer: lahiru <la...@apache.org>
Committed: Thu Apr 24 16:51:18 2014 -0400
----------------------------------------------------------------------
modules/gfac/gfac-bes/pom.xml | 154 +++++
.../context/security/GSISecurityContext.java | 288 +++++++++
.../gfac/provider/impl/BESProvider.java | 567 +++++++++++++++++
.../gfac/utils/ApplicationProcessor.java | 252 ++++++++
.../gfac/utils/DataStagingProcessor.java | 235 +++++++
.../airavata/gfac/utils/DataTransferrer.java | 241 +++++++
.../airavata/gfac/utils/FileDownloader.java | 256 ++++++++
.../airavata/gfac/utils/FileTransferBase.java | 227 +++++++
.../airavata/gfac/utils/FileUploader.java | 245 +++++++
.../airavata/gfac/utils/JSDLGenerator.java | 105 +++
.../apache/airavata/gfac/utils/JSDLUtils.java | 540 ++++++++++++++++
.../org/apache/airavata/gfac/utils/Mode.java | 45 ++
.../airavata/gfac/utils/OSRequirement.java | 108 ++++
.../org/apache/airavata/gfac/utils/OSType.java | 124 ++++
.../gfac/utils/ProcessorRequirement.java | 61 ++
.../airavata/gfac/utils/RangeValueType.java | 274 ++++++++
.../airavata/gfac/utils/ResourceProcessor.java | 152 +++++
.../airavata/gfac/utils/SPMDProcessor.java | 33 +
.../airavata/gfac/utils/SPMDVariations.java | 52 ++
.../airavata/gfac/utils/StorageCreator.java | 211 +++++++
.../gfac/utils/UASDataStagingProcessor.java | 225 +++++++
.../apache/airavata/gfac/utils/URIUtils.java | 119 ++++
.../src/main/resources/errors.properties | 197 ++++++
.../src/main/resources/service.properties | 58 ++
.../impl/JSDLGeneratorTestWithMyProxyAuth.java | 318 ++++++++++
.../src/test/resources/PBSTemplate.xslt | 73 +++
.../gfac-bes/src/test/resources/gfac-config.xml | 33 +
.../src/test/resources/logging.properties | 42 ++
modules/gfac/gfac-core/pom.xml | 79 +--
.../apache/airavata/gfac/GFacConfiguration.java | 30 -
.../context/security/GSISecurityContext.java | 289 ---------
.../org/apache/airavata/gfac/cpi/GFacImpl.java | 120 ----
.../apache/airavata/gfac/external/GridFtp.java | 631 -------------------
.../gfac/handler/GramDirectorySetupHandler.java | 135 ----
.../gfac/handler/GridFTPInputHandler.java | 203 ------
.../gfac/handler/GridFTPOutputHandler.java | 346 ----------
.../persistence/DBJobPersistenceManager.java | 220 -------
.../gfac/provider/impl/BESProvider.java | 568 -----------------
.../gfac/provider/impl/GramProvider.java | 527 ----------------
.../provider/utils/ApplicationProcessor.java | 252 --------
.../provider/utils/DataStagingProcessor.java | 236 -------
.../gfac/provider/utils/DataTransferrer.java | 241 -------
.../gfac/provider/utils/FileDownloader.java | 256 --------
.../gfac/provider/utils/FileTransferBase.java | 227 -------
.../gfac/provider/utils/FileUploader.java | 245 -------
.../gfac/provider/utils/JSDLGenerator.java | 103 ---
.../airavata/gfac/provider/utils/JSDLUtils.java | 540 ----------------
.../airavata/gfac/provider/utils/Mode.java | 45 --
.../gfac/provider/utils/OSRequirement.java | 106 ----
.../airavata/gfac/provider/utils/OSType.java | 124 ----
.../provider/utils/ProcessorRequirement.java | 61 --
.../gfac/provider/utils/RangeValueType.java | 272 --------
.../gfac/provider/utils/ResourceProcessor.java | 152 -----
.../gfac/provider/utils/SPMDProcessor.java | 33 -
.../gfac/provider/utils/SPMDVariations.java | 52 --
.../gfac/provider/utils/StorageCreator.java | 211 -------
.../provider/utils/UASDataStagingProcessor.java | 225 -------
.../airavata/gfac/provider/utils/URIUtils.java | 107 ----
.../apache/airavata/gfac/utils/GFacUtils.java | 45 +-
.../gfac/utils/GramJobSubmissionListener.java | 141 -----
.../airavata/gfac/utils/GramProviderUtils.java | 54 --
.../airavata/gfac/utils/GramRSLGenerator.java | 216 -------
.../gfac/utils/GridConfigurationHandler.java | 79 ---
.../airavata/gfac/utils/GridFTPContactInfo.java | 61 --
.../impl/GFacBaseTestWithMyProxyAuth.java | 115 ----
.../impl/GramProviderTestWithMyProxyAuth.java | 225 -------
.../impl/JSDLGeneratorTestWithMyProxyAuth.java | 318 ----------
modules/gfac/gfac-gram/pom.xml | 135 ++++
.../context/security/GSISecurityContext.java | 288 +++++++++
.../apache/airavata/gfac/external/GridFtp.java | 558 ++++++++++++++++
.../gfac/handler/GramDirectorySetupHandler.java | 135 ++++
.../gfac/handler/GridFTPInputHandler.java | 204 ++++++
.../gfac/handler/GridFTPOutputHandler.java | 347 ++++++++++
.../persistence/DBJobPersistenceManager.java | 223 +++++++
.../gfac/provider/impl/GramProvider.java | 526 ++++++++++++++++
.../gfac/util/GramJobSubmissionListener.java | 141 +++++
.../airavata/gfac/util/GramProviderUtils.java | 114 ++++
.../airavata/gfac/util/GramRSLGenerator.java | 211 +++++++
.../airavata/gfac/util/GridFTPContactInfo.java | 61 ++
.../src/main/resources/errors.properties | 197 ++++++
.../src/main/resources/service.properties | 58 ++
.../impl/GFacBaseTestWithMyProxyAuth.java | 115 ++++
.../impl/GramProviderTestWithMyProxyAuth.java | 225 +++++++
.../src/test/resources/PBSTemplate.xslt | 73 +++
.../src/test/resources/gfac-config.xml | 33 +
.../src/test/resources/logging.properties | 42 ++
modules/gfac/gfac-gsissh/pom.xml | 24 -
modules/gfac/gfac-ssh/pom.xml | 23 -
.../airavata/gfac/handler/SSHInputHandler.java | 8 +-
.../airavata/gfac/handler/SSHOutputHandler.java | 8 +-
.../gfac/provider/impl/SSHProvider.java | 1 -
modules/gfac/pom.xml | 2 +
92 files changed, 8929 insertions(+), 7948 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/pom.xml b/modules/gfac/gfac-bes/pom.xml
new file mode 100644
index 0000000..1694560
--- /dev/null
+++ b/modules/gfac/gfac-bes/pom.xml
@@ -0,0 +1,154 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+ the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+ obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+ in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+ the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>gfac</artifactId>
+ <version>0.12-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>airavata-gfac-bes</artifactId>
+ <name>Airavata GFac BES implementation</name>
+ <description>This is the extension of GFAC to use OGSA-BES/UNICORE</description>
+ <url>http://airavata.apache.org/</url>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gss</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gram</artifactId>
+ <version>${jglobus.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk16</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk16</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>myproxy</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gridftp</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+
+ <!-- Logging -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+
+ <!-- GFAC schemas -->
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-gfac-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- Credential Store -->
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-credential-store</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-server-configuration</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-client-configuration</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+
+ <!-- Test -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <version>6.1.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jcl-over-slf4j</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- gsi-ssh api dependencies -->
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>gsissh</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-data-models</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.jcraft</groupId>
+ <artifactId>jsch</artifactId>
+ <version>0.1.50</version>
+ </dependency>
+ <dependency>
+ <groupId>org.ogce</groupId>
+ <artifactId>bcgss</artifactId>
+ <version>146</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.xmlbeans</groupId>
+ <artifactId>xmlbeans</artifactId>
+ <version>${xmlbeans.version}</version>
+ </dependency>
+ <!-- Unicore dependencies -->
+ <dependency>
+ <groupId>eu.unicore</groupId>
+ <artifactId>ogsabes-client</artifactId>
+ <version>1.7.0-rc</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.santuario</groupId>
+ <artifactId>xmlsec</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk16</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+</project>
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
new file mode 100644
index 0000000..3eb020f
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
@@ -0,0 +1,288 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.context.security;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.credential.store.credential.Credential;
+import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
+import org.apache.airavata.credential.store.store.CredentialReader;
+import org.apache.airavata.gfac.AbstractSecurityContext;
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.RequestData;
+import org.apache.airavata.gsi.ssh.api.Cluster;
+import org.globus.gsi.X509Credential;
+import org.globus.gsi.gssapi.GlobusGSSCredentialImpl;
+import org.globus.gsi.provider.GlobusProvider;
+import org.globus.myproxy.GetParams;
+import org.globus.myproxy.MyProxy;
+import org.globus.myproxy.MyProxyException;
+import org.gridforum.jgss.ExtendedGSSCredential;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.security.Security;
+import java.security.cert.X509Certificate;
+
+/**
+ * Handles GRID related security.
+ */
+public class GSISecurityContext extends AbstractSecurityContext {
+
+ protected static final Logger log = LoggerFactory.getLogger(GSISecurityContext.class);
+ /*
+ * context name
+ */
+ public static final String GSI_SECURITY_CONTEXT = "gsi";
+
+ public static int CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90;
+
+ private GSSCredential gssCredentials = null;
+
+ private Cluster pbsCluster = null;
+
+ // Set trusted cert path and add provider
+ static {
+ Security.addProvider(new GlobusProvider());
+ try {
+ setUpTrustedCertificatePath();
+ } catch (ApplicationSettingsException e) {
+ log.error(e.getLocalizedMessage(), e);
+ }
+ }
+
+ public static void setUpTrustedCertificatePath(String trustedCertificatePath) {
+
+ File file = new File(trustedCertificatePath);
+
+ if (!file.exists() || !file.canRead()) {
+ File f = new File(".");
+ log.info("Current directory " + f.getAbsolutePath());
+ throw new RuntimeException("Cannot read trusted certificate path " + trustedCertificatePath);
+ } else {
+ System.setProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY, file.getAbsolutePath());
+ }
+ }
+
+ private static void setUpTrustedCertificatePath() throws ApplicationSettingsException {
+
+ String trustedCertificatePath = ServerSettings.getSetting(Constants.TRUSTED_CERT_LOCATION);
+
+ setUpTrustedCertificatePath(trustedCertificatePath);
+ }
+
+ /**
+ * Gets the trusted certificate path. Trusted certificate path is stored in "X509_CERT_DIR"
+ * system property.
+ * @return The trusted certificate path as a string.
+ */
+ public static String getTrustedCertificatePath() {
+ return System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY);
+ }
+
+
+ public GSISecurityContext(CredentialReader credentialReader, RequestData requestData) {
+ super(credentialReader, requestData);
+ }
+
+
+ public GSISecurityContext(Cluster pbsCluster) {
+ this.setPbsCluster(pbsCluster);
+ }
+
+ /**
+ * Gets GSSCredentials. The process is as follows;
+ * If credentials were queried for the first time create credentials.
+ * 1. Try creating credentials using certificates stored in the credential store
+ * 2. If 1 fails use user name and password to create credentials
+ * If credentials are already created check the remaining life time of the credential. If
+ * remaining life time is less than CREDENTIAL_RENEWING_THRESH_HOLD, then renew credentials.
+ * @return GSSCredentials to be used.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while creating credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getGssCredentials() throws GFacException, ApplicationSettingsException {
+
+ if (gssCredentials == null) {
+
+ try {
+ gssCredentials = getCredentialsFromStore();
+ } catch (Exception e) {
+ log.error("An exception occurred while retrieving credentials from the credential store. " +
+ "Will continue with my proxy user name and password.", e);
+ }
+
+ // If store does not have credentials try to get from user name and password
+ if (gssCredentials == null) {
+ gssCredentials = getDefaultCredentials();
+ }
+
+ // if still null, throw an exception
+ if (gssCredentials == null) {
+ throw new GFacException("Unable to retrieve my proxy credentials to continue operation.");
+ }
+ } else {
+ try {
+ if (gssCredentials.getRemainingLifetime() < CREDENTIAL_RENEWING_THRESH_HOLD) {
+ return renewCredentials();
+ }
+ } catch (GSSException e) {
+ throw new GFacException("Unable to retrieve remaining life time from credentials.", e);
+ }
+ }
+
+ return gssCredentials;
+ }
+
+ /**
+ * Renews credentials. First try to renew credentials as a trusted renewer. If that failed
+ * use user name and password to renew credentials.
+ * @return Renewed credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential renewCredentials() throws GFacException, ApplicationSettingsException {
+
+ // First try to renew credentials as a trusted renewer
+ try {
+ gssCredentials = renewCredentialsAsATrustedHost();
+ } catch (Exception e) {
+ log.warn("Renewing credentials as a trusted renewer failed", e);
+ gssCredentials = getProxyCredentials();
+ }
+
+ return gssCredentials;
+ }
+
+ /**
+ * Reads the credentials from credential store.
+ * @return If token is found in the credential store, will return a valid credential. Else returns null.
+ * @throws Exception If an error occurred while retrieving credentials.
+ */
+ public GSSCredential getCredentialsFromStore() throws Exception {
+
+ if (getCredentialReader() == null) {
+ return null;
+ }
+
+ Credential credential = getCredentialReader().getCredential(getRequestData().getGatewayId(),
+ getRequestData().getTokenId());
+
+ if (credential != null) {
+ if (credential instanceof CertificateCredential) {
+
+ log.info("Successfully found credentials for token id - " + getRequestData().getTokenId() +
+ " gateway id - " + getRequestData().getGatewayId());
+
+ CertificateCredential certificateCredential = (CertificateCredential) credential;
+
+ X509Certificate[] certificates = certificateCredential.getCertificates();
+ X509Credential newCredential = new X509Credential(certificateCredential.getPrivateKey(), certificates);
+
+ GlobusGSSCredentialImpl cred = new GlobusGSSCredentialImpl(newCredential, GSSCredential.INITIATE_AND_ACCEPT);
+ System.out.print(cred.export(ExtendedGSSCredential.IMPEXP_OPAQUE));
+ return cred;
+ //return new GlobusGSSCredentialImpl(newCredential,
+ // GSSCredential.INITIATE_AND_ACCEPT);
+ } else {
+ log.info("Credential type is not CertificateCredential. Cannot create mapping globus credentials. " +
+ "Credential type - " + credential.getClass().getName());
+ }
+ } else {
+ log.info("Could not find credentials for token - " + getRequestData().getTokenId() + " and "
+ + "gateway id - " + getRequestData().getGatewayId());
+ }
+
+ return null;
+ }
+
+ /**
+ * Gets the default proxy certificate.
+ * @return Default my proxy credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while retrieving credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getDefaultCredentials() throws GFacException, ApplicationSettingsException{
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ try {
+ return myproxy.get(getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
+ getRequestData().getMyProxyLifeTime());
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while retrieving default security credentials.", e);
+ }
+ }
+
+ /**
+ * Gets a new proxy certificate given current credentials.
+ * @return The short lived GSSCredentials
+ * @throws org.apache.airavata.gfac.GFacException If an error is occurred while retrieving credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getProxyCredentials() throws GFacException, ApplicationSettingsException {
+
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ try {
+ return myproxy.get(gssCredentials, getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
+ getRequestData().getMyProxyLifeTime());
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while renewing security credentials using user/password.", e);
+ }
+ }
+
+ /**
+ * Renew GSSCredentials.
+ * Before executing we need to add current host as a trusted renewer. Note to renew credentials
+ * we dont need user name and password.
+ * To do that execute following command
+ * > myproxy-logon -t <LIFETIME></LIFETIME> -s <MY PROXY SERVER> -l <USER NAME>
+ * E.g :- > myproxy-logon -t 264 -s myproxy.teragrid.org -l us3
+ * Enter MyProxy pass phrase:
+ * A credential has been received for user us3 in /tmp/x509up_u501.
+ * > myproxy-init -A --cert /tmp/x509up_u501 --key /tmp/x509up_u501 -l ogce -s myproxy.teragrid.org
+ * @return Renewed credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential renewCredentialsAsATrustedHost() throws GFacException, ApplicationSettingsException {
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ GetParams getParams = new GetParams();
+ getParams.setAuthzCreds(gssCredentials);
+ getParams.setUserName(getRequestData().getMyProxyUserName());
+ getParams.setLifetime(getRequestData().getMyProxyLifeTime());
+ try {
+ return myproxy.get(gssCredentials, getParams);
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while renewing security credentials.", e);
+ }
+ }
+
+ public Cluster getPbsCluster() {
+ return pbsCluster;
+ }
+
+ public void setPbsCluster(Cluster pbsCluster) {
+ this.pbsCluster = pbsCluster;
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
new file mode 100644
index 0000000..2ace533
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/provider/impl/BESProvider.java
@@ -0,0 +1,567 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.provider.impl;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.math.BigInteger;
+import java.security.InvalidKeyException;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.PrivateKey;
+import java.security.cert.X509Certificate;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
+
+import javax.security.auth.x500.X500Principal;
+
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.notification.events.StatusChangeEvent;
+import org.apache.airavata.gfac.notification.events.UnicoreJobIDEvent;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.utils.DataTransferrer;
+import org.apache.airavata.gfac.utils.JSDLGenerator;
+import org.apache.airavata.gfac.utils.StorageCreator;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.model.workspace.experiment.JobState;
+import org.apache.airavata.registry.api.workflow.ApplicationJob;
+import org.apache.airavata.registry.api.workflow.ApplicationJob.ApplicationJobStatus;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.apache.xmlbeans.XmlCursor;
+import org.bouncycastle.asn1.ASN1InputStream;
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.asn1.x500.style.BCStyle;
+import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
+import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration;
+import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration.Enum;
+import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStatusType;
+import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityDocument;
+import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityResponseDocument;
+import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesDocument;
+import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesResponseDocument;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3.x2005.x08.addressing.EndpointReferenceType;
+
+import de.fzj.unicore.bes.client.FactoryClient;
+import de.fzj.unicore.bes.faults.UnknownActivityIdentifierFault;
+import de.fzj.unicore.uas.client.StorageClient;
+import de.fzj.unicore.wsrflite.xmlbeans.WSUtilities;
+import eu.emi.security.authn.x509.helpers.CertificateHelpers;
+import eu.emi.security.authn.x509.helpers.proxy.X509v3CertificateBuilder;
+import eu.emi.security.authn.x509.impl.CertificateUtils;
+import eu.emi.security.authn.x509.impl.CertificateUtils.Encoding;
+import eu.emi.security.authn.x509.impl.DirectoryCertChainValidator;
+import eu.emi.security.authn.x509.impl.KeyAndCertCredential;
+import eu.emi.security.authn.x509.impl.X500NameUtils;
+import eu.unicore.util.httpclient.DefaultClientConfiguration;
+
+
+
+// Provider that submits and monitors jobs on a UNICORE OGSA-BES endpoint,
+// staging input/output files through a temporary UNICORE storage (SMS) instance.
+// execute() blocks, polling every 5 seconds, until the activity reaches a
+// terminal state (FINISHED, FAILED or CANCELLED).
+public class BESProvider extends AbstractProvider{
+ protected final Logger log = LoggerFactory.getLogger(this.getClass());
+
+ // Client security configuration (short-lived credential, trust anchors,
+ // out-handlers); built lazily by initSecurityProperties and reused.
+ private DefaultClientConfiguration secProperties;
+
+ // Resource id of the most recently submitted BES activity.
+ private String jobId;
+
+
+
+ // Initializes the provider and builds the UNICORE security properties
+ // from the GSI security context of the job execution context.
+ public void initialize(JobExecutionContext jobExecutionContext)
+ throws GFacProviderException, GFacException {
+ log.info("Initializing UNICORE Provider");
+ super.initialize(jobExecutionContext);
+ initSecurityProperties(jobExecutionContext);
+ log.debug("initialized security properties");
+ }
+
+
+ // Submits the job to the first configured BES endpoint, uploads local input
+ // files, waits for the activity to finish, then downloads outputs (or only
+ // stdout/stderr when the exit code is non-zero or the job failed). The
+ // temporary SMS instance is destroyed in the finally block.
+ public void execute(JobExecutionContext jobExecutionContext)
+ throws GFacProviderException {
+ UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
+ .getType();
+
+ String factoryUrl = host.getUnicoreBESEndPointArray()[0];
+
+ EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
+ eprt.addNewAddress().setStringValue(factoryUrl);
+
+ String userDN = getUserName(jobExecutionContext);
+
+ // NOTE(review): falls back to a hard-coded Ultrascan gateway DN when no
+ // real user name is available -- confirm this is intended for production.
+ if (userDN == null || userDN.equalsIgnoreCase("admin")) {
+ userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+ }
+
+ // The CN of the DN is used as the remote login (xlogin) for storage creation.
+ String xlogin = getCNFromUserDN(userDN);
+ // create storage
+ StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, xlogin);
+
+ StorageClient sc = null;
+ try {
+ try {
+ sc = storageCreator.createStorage();
+ } catch (Exception e2) {
+ log.error("Cannot create storage..");
+ throw new GFacProviderException("Cannot create storage..", e2);
+ }
+
+ CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();
+ JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory.newInstance();
+
+ JobDefinitionType jobDefinition = jobDefDoc.addNewJobDefinition();
+ try {
+ // Build the JSDL against the storage URL so data staging targets the SMS.
+ jobDefinition = JSDLGenerator.buildJSDLInstance(jobExecutionContext, sc.getUrl()).getJobDefinition();
+ cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
+
+ log.info("JSDL" + jobDefDoc.toString());
+ } catch (Exception e1) {
+ throw new GFacProviderException("Cannot generate JSDL instance from the JobExecutionContext.", e1);
+ }
+
+ // upload files if any
+ DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
+ dt.uploadLocalFiles();
+
+ FactoryClient factory = null;
+ try {
+ factory = new FactoryClient(eprt, secProperties);
+ } catch (Exception e) {
+ throw new GFacProviderException(e.getLocalizedMessage(), e);
+ }
+
+ CreateActivityResponseDocument response = null;
+ try {
+ log.info(String.format("Activity Submitting to %s ... \n", factoryUrl));
+ response = factory.createActivity(cad);
+ log.info(String.format("Activity Submitted to %s \n", factoryUrl));
+ } catch (Exception e) {
+ throw new GFacProviderException("Cannot create activity.", e);
+ }
+ EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
+
+ log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
+
+ // factory.waitWhileActivityIsDone(activityEpr, 1000);
+ jobId = WSUtilities.extractResourceID(activityEpr);
+ if (jobId == null) {
+ // Fall back to a timestamp-based id when the EPR carries no resource id.
+ jobId = new Long(Calendar.getInstance().getTimeInMillis()).toString();
+ }
+ log.info("JobID: " + jobId);
+ jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
+ saveApplicationJob(jobExecutionContext, jobDefinition, activityEpr.toString());
+
+ factory.getActivityStatus(activityEpr);
+ log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(),
+ factory.getActivityStatus(activityEpr).toString()));
+
+ // TODO publish the status messages to the message bus
+ // NOTE(review): each loop-condition evaluation can make up to three
+ // separate remote status calls -- consider fetching the status once
+ // per iteration and comparing locally.
+ while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
+ && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
+ && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
+
+ ActivityStatusType activityStatus = null;
+ try {
+ activityStatus = getStatus(factory, activityEpr);
+ JobState jobStatus = getApplicationJobStatus(activityStatus);
+ // NOTE(review): missing space -- message renders as "...<id>is <state>".
+ String jobStatusMessage = "Status of job " + jobId + "is " + jobStatus;
+ jobExecutionContext.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
+ details.setJobID(jobId);
+ GFacUtils.updateJobStatus(details, jobStatus);
+ } catch (UnknownActivityIdentifierFault e) {
+ throw new GFacProviderException(e.getMessage(), e.getCause());
+ }catch (GFacException e) {
+ throw new GFacProviderException(e.getMessage(), e.getCause());
+ }
+
+ // Poll every 5 seconds.
+ // NOTE(review): InterruptedException is swallowed without re-interrupting
+ // the thread; cancellation of this poller will not be observed.
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ }
+ continue;
+ }
+
+ // Fetch the terminal status once more for the final dispatch below.
+ ActivityStatusType activityStatus = null;
+ try {
+ activityStatus = getStatus(factory, activityEpr);
+ } catch (UnknownActivityIdentifierFault e) {
+ throw new GFacProviderException(e.getMessage(), e.getCause());
+ }
+
+ log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState()
+ .toString()));
+
+ if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
+ String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n"
+ + activityStatus.getFault().getFaultstring() + "\n EXITCODE: " + activityStatus.getExitCode();
+ log.info(error);
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ }
+ // On failure only stdout/stderr are retrieved for diagnosis.
+ dt.downloadStdOuts();
+ } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
+ String experimentID = (String) jobExecutionContext.getProperty(Constants.PROP_TOPIC);
+ JobState jobStatus = JobState.CANCELED;
+ String jobStatusMessage = "Status of job " + jobId + "is " + jobStatus;
+ jobExecutionContext.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
+ details.setJobID(jobId);
+ try {
+ GFacUtils.saveJobStatus(details, jobStatus, jobExecutionContext.getTaskData().getTaskID());
+ } catch (GFacException e) {
+ throw new GFacProviderException(e.getLocalizedMessage(),e);
+ }
+ // Cancellation is surfaced to the caller as a provider exception.
+ throw new GFacProviderException(experimentID + "Job Canceled");
+ }
+
+ else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
+ try {
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ }
+ // Exit code 0: fetch all remote outputs; otherwise only stdout/stderr.
+ if (activityStatus.getExitCode() == 0) {
+ dt.downloadRemoteFiles();
+ } else {
+ dt.downloadStdOuts();
+ }
+ }
+
+ } catch (UnknownActivityIdentifierFault e1) {
+ throw new GFacProviderException(e1.getLocalizedMessage(), e1);
+ } finally {
+ // destroy sms instance
+ try {
+ if (sc != null) {
+ sc.destroy();
+ }
+ } catch (Exception e) {
+ log.warn("Cannot destroy temporary SMS instance:" + sc.getUrl(), e);
+ }
+ }
+ }
+
+ // Maps a BES ActivityStatusType -- preferring the finer-grained HPC Profile
+ // sub-state child element when present -- to the Airavata JobState enum.
+ // Returns UNKNOWN when the status is null or cannot be mapped.
+ private JobState getApplicationJobStatus(ActivityStatusType activityStatus){
+ if (activityStatus == null) {
+ return JobState.UNKNOWN;
+ }
+ Enum state = activityStatus.getState();
+ String status = null;
+ XmlCursor acursor = activityStatus.newCursor();
+ try {
+ // Look for an HPC Profile sub-state element nested under the status.
+ if (acursor.toFirstChild()) {
+ if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
+ status = acursor.getName().getLocalPart();
+ }
+ }
+ if (status != null) {
+ if (status.equalsIgnoreCase("Queued") || status.equalsIgnoreCase("Starting")
+ || status.equalsIgnoreCase("Ready")) {
+ return JobState.QUEUED;
+ } else if (status.equalsIgnoreCase("Staging-In")) {
+ return JobState.SUBMITTED;
+ } else if (status.equalsIgnoreCase("Staging-Out") || status.equalsIgnoreCase("FINISHED")) {
+ return JobState.COMPLETE;
+ } else if (status.equalsIgnoreCase("Executing")) {
+ return JobState.ACTIVE;
+ } else if (status.equalsIgnoreCase("FAILED")) {
+ return JobState.FAILED;
+ } else if (status.equalsIgnoreCase("CANCELLED")) {
+ return JobState.CANCELED;
+ }
+ } else {
+ // No sub-state: fall back to the coarse BES activity state.
+ if (ActivityStateEnumeration.CANCELLED.equals(state)) {
+ return JobState.CANCELED;
+ } else if (ActivityStateEnumeration.FAILED.equals(state)) {
+ return JobState.FAILED;
+ } else if (ActivityStateEnumeration.FINISHED.equals(state)) {
+ return JobState.COMPLETE;
+ } else if (ActivityStateEnumeration.RUNNING.equals(state)) {
+ return JobState.ACTIVE;
+ }
+ }
+ } finally {
+ // Cursors must always be disposed to release XmlBeans resources.
+ if (acursor != null)
+ acursor.dispose();
+ }
+ return JobState.UNKNOWN;
+ }
+
+ // Records the submitted activity in the registry with SUBMITTED status,
+ // storing the JSDL as job data and the activity EPR as metadata.
+ private void saveApplicationJob(JobExecutionContext jobExecutionContext, JobDefinitionType jobDefinition,
+ String metadata) {
+ ApplicationJob appJob = GFacUtils.createApplicationJob(jobExecutionContext);
+ appJob.setJobId(jobId);
+ appJob.setJobData(jobDefinition.toString());
+ appJob.setSubmittedTime(Calendar.getInstance().getTime());
+ appJob.setStatus(ApplicationJobStatus.SUBMITTED);
+ appJob.setStatusUpdateTime(appJob.getSubmittedTime());
+ appJob.setMetadata(metadata);
+ GFacUtils.recordApplicationJob(jobExecutionContext, appJob);
+ }
+
+ // Clears the cached security configuration; it is rebuilt on next initialize.
+ public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
+ secProperties = null;
+ }
+
+ /**
+ * EndpointReference need to be saved to make cancel work.
+ *
+ * @param activityEpr
+ * @param jobExecutionContext
+ * @throws GFacProviderException
+ */
+ public void cancelJob(String activityEpr, JobExecutionContext jobExecutionContext) throws GFacProviderException {
+ try {
+ initSecurityProperties(jobExecutionContext);
+ EndpointReferenceType eprt = EndpointReferenceType.Factory.parse(activityEpr);
+ UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
+ .getType();
+
+ String factoryUrl = host.getUnicoreBESEndPointArray()[0];
+ EndpointReferenceType epr = EndpointReferenceType.Factory.newInstance();
+ epr.addNewAddress().setStringValue(factoryUrl);
+
+ FactoryClient factory = new FactoryClient(epr, secProperties);
+ factory.terminateActivity(eprt);
+ } catch (Exception e) {
+ throw new GFacProviderException(e.getLocalizedMessage(),e);
+ }
+
+ }
+
+ // Downloads output files from a previously created SMS instance, identified
+ // by its serialized EPR (e.g. after a client reconnect).
+ protected void downloadOffline(String smsEpr, JobExecutionContext jobExecutionContext) throws GFacProviderException {
+ try {
+ initSecurityProperties(jobExecutionContext);
+ EndpointReferenceType eprt = EndpointReferenceType.Factory.parse(smsEpr);
+ StorageClient sms = new StorageClient(eprt, secProperties);
+ DataTransferrer dt = new DataTransferrer(jobExecutionContext, sms);
+ // there must be output files there
+ // this is also possible if client is re-connected, the jobs are
+ // still
+ // running and no output is produced
+ dt.downloadRemoteFiles();
+
+ // may be use the below method before downloading for checking
+ // the number of entries
+ // sms.listDirectory(".");
+
+ } catch (Exception e) {
+ throw new GFacProviderException(e.getLocalizedMessage(), e);
+ }
+ }
+
+ // Builds the UNICORE client security configuration: a directory-based trust
+ // validator over the GSI context's trusted-certificate path, plus a
+ // short-lived user certificate signed by the local CA found in that path.
+ // Idempotent -- returns immediately if already initialized.
+ protected void initSecurityProperties(JobExecutionContext jobExecutionContext) throws GFacProviderException,
+ GFacException {
+
+ if (secProperties != null)
+ return;
+
+ GSISecurityContext gssContext = (GSISecurityContext) jobExecutionContext
+ .getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT);
+
+ try {
+ String certLocation = gssContext.getTrustedCertificatePath();
+ List<String> trustedCert = new ArrayList<String>();
+ trustedCert.add(certLocation + "/*.0");
+ trustedCert.add(certLocation + "/*.pem");
+
+ DirectoryCertChainValidator dcValidator = new DirectoryCertChainValidator(trustedCert, Encoding.PEM, -1,
+ 60000, null);
+
+ String userID = getUserName(jobExecutionContext);
+
+ if ( userID == null || "".equals(userID) || userID.equalsIgnoreCase("admin") ) {
+ userID = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+ }
+
+ // Strip surrounding double quotes, if any, from the DN.
+ String userDN = userID.replaceAll("^\"|\"$", "");
+
+ // TODO: should be changed to default airavata server locations
+ // NOTE(review): CA key password "ultrascan3" is hard-coded -- should come
+ // from configuration/credential store.
+ KeyAndCertCredential cred = generateShortLivedCertificate(userDN, certLocation
+ + "/cacert.pem", certLocation
+ + "/cakey.pem", "ultrascan3");
+ secProperties = new DefaultClientConfiguration(dcValidator, cred);
+
+ // secProperties.doSSLAuthn();
+ secProperties.getETDSettings().setExtendTrustDelegation(true);
+
+ secProperties.setDoSignMessage(true);
+
+ String[] outHandlers = secProperties.getOutHandlerClassNames();
+
+ Set<String> outHandlerLst = null;
+
+ // timeout in milliseconds
+ Properties p = secProperties.getExtraSettings();
+ p.setProperty("http.connection.timeout", "300000");
+ p.setProperty("http.socket.timeout", "300000");
+
+ if (outHandlers == null) {
+ outHandlerLst = new HashSet<String>();
+ } else {
+ outHandlerLst = new HashSet<String>(Arrays.asList(outHandlers));
+ }
+
+ // Ensure the proxy-certificate out-handler is registered exactly once.
+ outHandlerLst.add("de.fzj.unicore.uas.security.ProxyCertOutHandler");
+
+ secProperties.setOutHandlerClassNames(outHandlerLst.toArray(new String[outHandlerLst.size()]));
+
+ } catch (Exception e) {
+ throw new GFacProviderException(e.getMessage(), e);
+ }
+ }
+
+ //FIXME: Get user details
+ // NOTE(review): always returns "" -- callers then substitute the hard-coded
+ // gateway DN; real user resolution is pending.
+ private String getUserName(JobExecutionContext context) {
+// if (context.getConfigurationData()!= null) {
+// return context.getConfigurationData().getBasicMetadata().getUserName();
+// } else {
+ return "";
+// }
+ }
+
+ // Queries the BES factory for the status of a single activity.
+ protected ActivityStatusType getStatus(FactoryClient fc, EndpointReferenceType activityEpr)
+ throws UnknownActivityIdentifierFault {
+
+ GetActivityStatusesDocument stats = GetActivityStatusesDocument.Factory.newInstance();
+
+ stats.addNewGetActivityStatuses().setActivityIdentifierArray(new EndpointReferenceType[] { activityEpr });
+
+ GetActivityStatusesResponseDocument resDoc = fc.getActivityStatuses(stats);
+
+ ActivityStatusType activityStatus = resDoc.getGetActivityStatusesResponse().getResponseArray()[0]
+ .getActivityStatus();
+ return activityStatus;
+ }
+
+ // Formats a human-readable one-line status message for logging.
+ protected String formatStatusMessage(String activityUrl, String status) {
+ return String.format("Activity %s is %s.\n", activityUrl, status);
+ }
+
+ // Renders the BES state plus any HPC Profile sub-states as "STATE:sub:sub".
+ protected String subStatusAsString(ActivityStatusType statusType) {
+
+ StringBuffer sb = new StringBuffer();
+
+ sb.append(statusType.getState().toString());
+
+ XmlCursor acursor = statusType.newCursor();
+ if (acursor.toFirstChild()) {
+ do {
+ if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
+ sb.append(":");
+ sb.append(acursor.getName().getLocalPart());
+ }
+ } while (acursor.toNextSibling());
+ acursor.dispose();
+ return sb.toString();
+ } else {
+ acursor.dispose();
+ return sb.toString();
+ }
+
+ }
+
+ // No provider-level properties are consumed; intentionally a no-op.
+ public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
+
+ }
+
+ // Issues a short-lived (30 hour) RSA certificate for the given user DN,
+ // signed by the CA credential loaded from caCertPath/caKeyPath. Validity is
+ // backdated 15 minutes to tolerate clock skew between hosts.
+ // NOTE(review): the serial number uses java.util.Random with only 20 bits --
+ // a SecureRandom-backed, larger serial would be preferable.
+ protected KeyAndCertCredential generateShortLivedCertificate(String userDN, String caCertPath, String caKeyPath,
+ String caPwd) throws Exception {
+ final long CredentialGoodFromOffset = 1000L * 60L * 15L; // 15 minutes
+ // ago
+
+ final long startTime = System.currentTimeMillis() - CredentialGoodFromOffset;
+ final long endTime = startTime + 30 * 3600 * 1000;
+
+ String keyLengthProp = "1024";
+ int keyLength = Integer.parseInt(keyLengthProp);
+ String signatureAlgorithm = "SHA1withRSA";
+
+ KeyAndCertCredential caCred = getCACredential(caCertPath, caKeyPath, caPwd);
+
+ // Generate the user key pair with the same algorithm as the CA key.
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance(caCred.getKey().getAlgorithm());
+ kpg.initialize(keyLength);
+ KeyPair pair = kpg.generateKeyPair();
+
+ X500Principal subjectDN = new X500Principal(userDN);
+ Random rand = new Random();
+
+ SubjectPublicKeyInfo publicKeyInfo;
+ try {
+ publicKeyInfo = SubjectPublicKeyInfo.getInstance(new ASN1InputStream(pair.getPublic().getEncoded())
+ .readObject());
+ } catch (IOException e) {
+ throw new InvalidKeyException("Can not parse the public key"
+ + "being included in the short lived certificate", e);
+ }
+
+ X500Name issuerX500Name = CertificateHelpers.toX500Name(caCred.getCertificate().getSubjectX500Principal());
+
+ X500Name subjectX500Name = CertificateHelpers.toX500Name(subjectDN);
+
+ X509v3CertificateBuilder certBuilder = new X509v3CertificateBuilder(issuerX500Name, new BigInteger(20, rand),
+ new Date(startTime), new Date(endTime), subjectX500Name, publicKeyInfo);
+
+ AlgorithmIdentifier sigAlgId = X509v3CertificateBuilder.extractAlgorithmId(caCred.getCertificate());
+
+ X509Certificate certificate = certBuilder.build(caCred.getKey(), sigAlgId, signatureAlgorithm, null, null);
+
+ // Sanity-check the freshly issued certificate before returning it.
+ certificate.checkValidity(new Date());
+ certificate.verify(caCred.getCertificate().getPublicKey());
+ // Return the user key with the chain [user cert, CA cert].
+ KeyAndCertCredential result = new KeyAndCertCredential(pair.getPrivate(), new X509Certificate[] { certificate,
+ caCred.getCertificate() });
+
+ return result;
+ }
+
+ // Loads the CA private key (PEM, password-protected) and CA certificate.
+ // NOTE(review): streams are not closed if loading throws -- try-with-resources
+ // (or a finally block) would avoid the leak.
+ private KeyAndCertCredential getCACredential(String caCertPath, String caKeyPath, String password) throws Exception {
+ InputStream isKey = new FileInputStream(caKeyPath);
+ PrivateKey pk = CertificateUtils.loadPrivateKey(isKey, Encoding.PEM, password.toCharArray());
+
+ InputStream isCert = new FileInputStream(caCertPath);
+ X509Certificate caCert = CertificateUtils.loadCertificate(isCert, Encoding.PEM);
+
+ if (isKey != null)
+ isKey.close();
+ if (isCert != null)
+ isCert.close();
+
+ return new KeyAndCertCredential(pk, new X509Certificate[] { caCert });
+ }
+
+ // Extracts the first CN attribute value from an X.500 distinguished name.
+ private String getCNFromUserDN(String userDN) {
+ return X500NameUtils.getAttributeValues(userDN, BCStyle.CN)[0];
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ApplicationProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ApplicationProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ApplicationProcessor.java
new file mode 100644
index 0000000..76e9818
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ApplicationProcessor.java
@@ -0,0 +1,252 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.schemas.gfac.ExtendedKeyValueType;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.JobTypeType;
+import org.apache.airavata.schemas.gfac.NameValuePairType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.FileNameType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.UserNameType;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ProcessesPerHostType;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ThreadsPerProcessType;
+
+import java.io.File;
+
+
+// Static helpers that translate an Airavata HPC application deployment
+// description into the application section of a JSDL job definition,
+// choosing between the SPMD (parallel) and POSIX (serial) JSDL profiles.
+public class ApplicationProcessor {
+
+ // Populates the JSDL application element (executable, environment, SPMD
+ // settings, user name, stdout/stderr names) from the deployment description.
+ public static void generateJobSpecificAppElements(JobDefinitionType value, JobExecutionContext context){
+
+ String userName = getUserNameFromContext(context);
+ // NOTE(review): getUserNameFromContext can return null (when taskData is
+ // null), which would NPE on the next line -- confirm callers guarantee taskData.
+ if (userName.equalsIgnoreCase("admin")){
+ userName = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+ }
+
+ HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+ .getApplicationContext().getApplicationDeploymentDescription()
+ .getType();
+
+ createGenericApplication(value, appDepType);
+
+ if (appDepType.getApplicationEnvironmentArray().length > 0) {
+ createApplicationEnvironment(value,
+ appDepType.getApplicationEnvironmentArray(), appDepType);
+ }
+
+
+ if (appDepType.getExecutableLocation() != null) {
+ FileNameType fNameType = FileNameType.Factory.newInstance();
+ fNameType.setStringValue(appDepType.getExecutableLocation());
+ if(isParallelJob(appDepType)) {
+ // Parallel job: use the SPMD application profile.
+ JSDLUtils.getOrCreateSPMDApplication(value).setExecutable(fNameType);
+ JSDLUtils.getSPMDApplication(value).setSPMDVariation(getSPMDVariation(appDepType));
+
+ if(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES)!=null){
+ NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
+ num.setStringValue(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES));
+ JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
+ }
+
+ if(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST)!=null){
+ ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
+ pph.setStringValue(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST));
+ JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
+ }
+
+ // NOTE(review): the THREADSPERHOST key feeds ThreadsPerProcess --
+ // verify the key name matches the intended JSDL SPMD element.
+ if(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST)!=null){
+ ThreadsPerProcessType tpp = ThreadsPerProcessType.Factory.newInstance();
+ tpp.setStringValue(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST));
+ JSDLUtils.getSPMDApplication(value).setThreadsPerProcess(tpp);
+
+ }
+
+ if(userName != null) {
+ UserNameType userNameType = UserNameType.Factory.newInstance();
+ userNameType.setStringValue(userName);
+ JSDLUtils.getSPMDApplication(value).setUserName(userNameType);
+ }
+ }
+ else {
+ // Serial job: use the POSIX application profile.
+ JSDLUtils.getOrCreatePOSIXApplication(value).setExecutable(fNameType);
+ if(userName != null) {
+ UserNameType userNameType = UserNameType.Factory.newInstance();
+ userNameType.setStringValue(userName);
+ JSDLUtils.getOrCreatePOSIXApplication(value).setUserName(userNameType);
+ }
+ }
+ }
+
+
+ // Only the file name (not the full path) is placed in the JSDL; defaults
+ // to "stdout"/"stderr" when the deployment does not specify them.
+ String stdout = (appDepType.getStandardOutput() != null) ? new File(appDepType.getStandardOutput()).getName(): "stdout";
+ ApplicationProcessor.setApplicationStdOut(value, appDepType, stdout);
+
+
+ String stderr = (appDepType.getStandardError() != null) ? new File(appDepType.getStandardError()).getName() : "stderr";
+ ApplicationProcessor.setApplicationStdErr(value, appDepType, stderr);
+
+ }
+
+ // Returns the submitting user's name, or null when no task data is attached.
+ public static String getUserNameFromContext(JobExecutionContext jobContext) {
+ if(jobContext.getTaskData() == null)
+ return null;
+ //FIXME: Discuss to get user and change this
+ return "admin";
+ }
+ // True when the deployment's job type is MPI or OpenMP; serial/single
+ // (and unset) job types are treated as non-parallel.
+ public static boolean isParallelJob(HpcApplicationDeploymentType appDepType) {
+
+ boolean isParallel = false;
+
+ if (appDepType.getJobType() != null) {
+ // TODO set data output directory
+ int status = appDepType.getJobType().intValue();
+
+ switch (status) {
+ // TODO: this check should be done outside this class
+ case JobTypeType.INT_MPI:
+ case JobTypeType.INT_OPEN_MP:
+ isParallel = true;
+ break;
+
+ case JobTypeType.INT_SERIAL:
+ case JobTypeType.INT_SINGLE:
+ isParallel = false;
+ break;
+
+ default:
+ isParallel = false;
+ break;
+ }
+ }
+ return isParallel;
+ }
+
+
+ // Adds each name/value pair as an Environment element under the SPMD or
+ // POSIX application, depending on the job's parallelism.
+ public static void createApplicationEnvironment(JobDefinitionType value, NameValuePairType[] nameValuePairs, HpcApplicationDeploymentType appDepType) {
+
+ if(isParallelJob(appDepType)) {
+ for (NameValuePairType nv : nameValuePairs) {
+ EnvironmentType envType = JSDLUtils.getOrCreateSPMDApplication(value).addNewEnvironment();
+ envType.setName(nv.getName());
+ envType.setStringValue(nv.getValue());
+ }
+ }
+ else {
+ for (NameValuePairType nv : nameValuePairs) {
+ EnvironmentType envType = JSDLUtils.getOrCreatePOSIXApplication(value).addNewEnvironment();
+ envType.setName(nv.getName());
+ envType.setStringValue(nv.getValue());
+ }
+ }
+
+ }
+
+
+ // Maps the job type to a JSDL SPMD variation URI; null when not parallel.
+ // NOTE(review): OPEN_MP maps to SPMDVariations.OpenMPI -- confirm this is
+ // the intended variation (OpenMP vs. OpenMPI are different things).
+ public static String getSPMDVariation (HpcApplicationDeploymentType appDepType) {
+
+ String variation = null;
+
+ if (appDepType.getJobType() != null) {
+ // TODO set data output directory
+ int status = appDepType.getJobType().intValue();
+
+ switch (status) {
+ // TODO: this check should be done outside this class
+ case JobTypeType.INT_MPI:
+ variation = SPMDVariations.MPI.value();
+ break;
+
+ case JobTypeType.INT_OPEN_MP:
+ variation = SPMDVariations.OpenMPI.value();
+ break;
+
+ }
+ }
+ return variation;
+ }
+
+
+ // Appends one command-line argument to the SPMD or POSIX application.
+ public static void addApplicationArgument(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stringPrm) {
+ if(isParallelJob(appDepType))
+ JSDLUtils.getOrCreateSPMDApplication(value)
+ .addNewArgument().setStringValue(stringPrm);
+ else
+ JSDLUtils.getOrCreatePOSIXApplication(value)
+ .addNewArgument().setStringValue(stringPrm);
+
+ }
+
+ // Sets the stderr file name on the SPMD or POSIX application.
+ public static void setApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
+ FileNameType fName = FileNameType.Factory.newInstance();
+ fName.setStringValue(stderr);
+ if (isParallelJob(appDepType))
+ JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
+ else
+ JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
+ }
+
+ // Sets the stdout file name on the SPMD or POSIX application.
+ // NOTE(review): the parameter is misleadingly named "stderr" -- it carries
+ // the stdout file name here.
+ public static void setApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
+ FileNameType fName = FileNameType.Factory.newInstance();
+ fName.setStringValue(stderr);
+ if (isParallelJob(appDepType))
+ JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
+ else
+ JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
+ }
+
+ // Reads back the stdout file name from the SPMD or POSIX application.
+ public static String getApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
+ if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getOutput().getStringValue();
+ else return JSDLUtils.getOrCreatePOSIXApplication(value).getOutput().getStringValue();
+ }
+
+ // Reads back the stderr file name from the SPMD or POSIX application.
+ public static String getApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
+ if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getError().getStringValue();
+ else return JSDLUtils.getOrCreatePOSIXApplication(value).getError().getStringValue();
+ }
+
+ // Copies the application name into the JSDL application element and also
+ // uses it as the job name in the job identification section.
+ public static void createGenericApplication(JobDefinitionType value, HpcApplicationDeploymentType appDepType) {
+ if (appDepType.getApplicationName() != null) {
+ ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
+ String appName = appDepType.getApplicationName()
+ .getStringValue();
+ appType.setApplicationName(appName);
+ JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
+ }
+ }
+
+
+ // Case-insensitive lookup of a deployment key/value pair; null when absent.
+ public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
+ ExtendedKeyValueType[] extended = appDepType.getKeyValuePairsArray();
+ for(ExtendedKeyValueType e: extended) {
+ if(e.getName().equalsIgnoreCase(name)) {
+ return e.getStringValue();
+ }
+ }
+ return null;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataStagingProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataStagingProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataStagingProcessor.java
new file mode 100644
index 0000000..adf6836
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataStagingProcessor.java
@@ -0,0 +1,235 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.File;
+import java.net.URI;
+import java.util.Map;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+
+/**
+ * Builds JSDL data-staging elements (stage-in sources and stage-out targets)
+ * for a BES/UNICORE job from the Airavata input and output message contexts.
+ */
+public class DataStagingProcessor {
+
+    /**
+     * Appends data-staging elements for all input and output parameters of the
+     * job, plus staging targets for stdout/stderr (and the UNICORE script exit
+     * code file when the host is a UNICORE endpoint).
+     */
+    public static void generateDataStagingElements(JobDefinitionType value, JobExecutionContext context) throws Exception {
+
+        HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+                .getApplicationContext().getApplicationDeploymentDescription()
+                .getType();
+
+        // the first configured GridFTP endpoint of the UNICORE host is used for all staging
+        String gridftpEndpoint = ((UnicoreHostType) context.getApplicationContext().getHostDescription().getType())
+                .getGridFTPEndPointArray()[0];
+
+        if (context.getInMessageContext().getParameters().size() > 0) {
+            buildDataStagingFromInputContext(context, value, gridftpEndpoint, appDepType);
+        }
+
+        if (context.getOutMessageContext().getParameters().size() > 0) {
+            buildFromOutputContext(context, value, gridftpEndpoint, appDepType);
+        }
+
+        createStdOutURIs(value, appDepType, gridftpEndpoint, isUnicoreEndpoint(context));
+    }
+
+    /**
+     * Adds a stage-in source element for a single URI input parameter.
+     * Local ("file") URIs are mapped into the remote input directory; remote
+     * gsiftp/http/rns URIs are referenced in place without copying.
+     */
+    private static void createInURIElement(JobDefinitionType value,
+            String endpoint, String inputDir, ActualParameter inParam)
+            throws Exception {
+
+        String uri = ((URIParameterType) inParam.getType()).getValue();
+        String fileName = new File(uri).getName();
+        if (uri.startsWith("file")) {
+            URI gridFTPInputDirURI = URIUtils.createGsiftpURI(endpoint,
+                    inputDir);
+            String filePath = gridFTPInputDirURI.toString() + File.separator
+                    + fileName;
+            JSDLUtils
+                    .addDataStagingSourceElement(value, filePath, null, fileName);
+        } else if (uri.startsWith("gsiftp") || uri.startsWith("http")
+                || uri.startsWith("rns")) {
+            // no need to stage-in those files to the input directory
+            JSDLUtils.addDataStagingSourceElement(value, uri, null, fileName);
+        }
+    }
+
+    /**
+     * Adds stage-out targets for stdout and stderr (falling back to the names
+     * "stdout"/"stderr" when not configured), and for the UNICORE script exit
+     * code marker file when the endpoint is UNICORE.
+     */
+    private static void createStdOutURIs(JobDefinitionType value,
+            HpcApplicationDeploymentType appDepType, String endpoint,
+            boolean isUnicore) throws Exception {
+
+        URI remoteOutputDir = URIUtils.createGsiftpURI(endpoint,
+                appDepType.getOutputDataDirectory());
+
+        String stdout = ApplicationProcessor.getApplicationStdOut(value, appDepType);
+
+        String stderr = ApplicationProcessor.getApplicationStdErr(value, appDepType);
+
+        String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
+                : stdout;
+        String stdoutURI = GFacUtils.createGsiftpURIAsString(
+                remoteOutputDir.toString(), stdoutFileName);
+        JSDLUtils.addDataStagingTargetElement(value, null, stdoutFileName,
+                stdoutURI);
+
+        // BUGFIX: the null check must be on stderr, not stdout (copy-paste
+        // error previously caused an NPE when stderr was null but stdout was not)
+        String stderrFileName = (stderr == null || stderr.equals("")) ? "stderr"
+                : stderr;
+        String stderrURI = GFacUtils.createGsiftpURIAsString(
+                remoteOutputDir.toString(), stderrFileName);
+        JSDLUtils.addDataStagingTargetElement(value, null, stderrFileName,
+                stderrURI);
+
+        if (isUnicore) {
+            String scriptExitCodeFName = "UNICORE_SCRIPT_EXIT_CODE";
+            String scriptExitCode = GFacUtils.createGsiftpURIAsString(
+                    remoteOutputDir.toString(), scriptExitCodeFName);
+            JSDLUtils.addDataStagingTargetElement(value, null,
+                    scriptExitCodeFName, scriptExitCode.toString());
+        }
+    }
+
+    /**
+     * Adds a stage-out target in the remote output directory for a string
+     * output parameter naming a file. No-op for null/empty values.
+     */
+    private static void createOutStringElements(JobDefinitionType value,
+            HpcApplicationDeploymentType appDeptype, String endpoint, String prmValue) throws Exception {
+
+        if (prmValue == null || "".equals(prmValue)) return;
+
+        String outputUri = GFacUtils.createGsiftpURIAsString(endpoint, appDeptype.getOutputDataDirectory());
+
+        URI finalOutputUri = URIUtils.createGsiftpURI(outputUri, prmValue);
+        JSDLUtils.addDataStagingTargetElement(value, null, prmValue, finalOutputUri.toString());
+    }
+
+    /** Adds a stage-out target for a URI output parameter, keyed by its file name. */
+    private static void createOutURIElement(JobDefinitionType value,
+            String prmValue) throws Exception {
+        String fileName = new File(prmValue.toString()).getName();
+        JSDLUtils.addDataStagingTargetElement(value, null, fileName, prmValue);
+    }
+
+    /**
+     * Walks the output message context and adds stage-out elements for URI,
+     * URIArray, String and StringArray parameters.
+     */
+    private static JobDefinitionType buildFromOutputContext(JobExecutionContext context,
+            JobDefinitionType value, String gridftpEndpoint,
+            HpcApplicationDeploymentType appDepType) throws Exception {
+
+        Map<String, Object> outputParams = context.getOutMessageContext()
+                .getParameters();
+
+        for (String paramKey : outputParams.keySet()) {
+
+            ActualParameter outParam = (ActualParameter) outputParams
+                    .get(paramKey);
+
+            // single URIs are turned directly into JSDL target elements
+            String paramDataType = outParam.getType().getType().toString();
+
+            if ("URI".equals(paramDataType)) {
+                String uriPrm = ((URIParameterType) outParam.getType())
+                        .getValue();
+                createOutURIElement(value, uriPrm);
+            }
+
+            else if (("URIArray").equals(paramDataType)) {
+                String[] uriArray = ((URIArrayType) outParam.getType())
+                        .getValueArray();
+                for (String u : uriArray) {
+                    createOutURIElement(value, u);
+                }
+            }
+
+            // string params name files in the remote output directory
+            else if ("String".equals(paramDataType)) {
+                String stringPrm = ((StringParameterType) outParam
+                        .getType()).getValue();
+                createOutStringElements(value, appDepType, gridftpEndpoint, stringPrm);
+            }
+
+            else if ("StringArray".equals(paramDataType)) {
+                String[] valueArray = ((StringArrayType) outParam.getType())
+                        .getValueArray();
+                for (String v : valueArray) {
+                    createOutStringElements(value, appDepType, gridftpEndpoint, v);
+                }
+            }
+        }
+
+        return value;
+    }
+
+    /**
+     * Walks the input message context: URI parameters become stage-in source
+     * elements, string parameters become application arguments.
+     */
+    private static void buildDataStagingFromInputContext(JobExecutionContext context, JobDefinitionType value, String gridftpEndpoint, HpcApplicationDeploymentType appDepType)
+            throws Exception {
+
+        // TODO set data directory
+        Map<String, Object> inputParams = context.getInMessageContext()
+                .getParameters();
+
+        for (String paramKey : inputParams.keySet()) {
+
+            ActualParameter inParam = (ActualParameter) inputParams
+                    .get(paramKey);
+
+            String paramDataType = inParam.getType().getType().toString();
+
+            if ("URI".equals(paramDataType)) {
+                createInURIElement(value, gridftpEndpoint,
+                        appDepType.getInputDataDirectory(), inParam);
+            }
+
+            // string params are converted into the job arguments
+            else if ("String".equals(paramDataType)) {
+                String stringPrm = ((StringParameterType) inParam.getType())
+                        .getValue();
+                ApplicationProcessor.addApplicationArgument(value, appDepType, stringPrm);
+            }
+        }
+    }
+
+    /** True when the job's host description is a UNICORE host type. */
+    public static boolean isUnicoreEndpoint(JobExecutionContext context) {
+        return (context.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType);
+    }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataTransferrer.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataTransferrer.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataTransferrer.java
new file mode 100644
index 0000000..6f844c7
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/DataTransferrer.java
@@ -0,0 +1,241 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import de.fzj.unicore.uas.client.StorageClient;
+
+
+/**
+ * Transfers job files between the local machine and a UNICORE storage:
+ * uploads local input files before submission and downloads outputs
+ * (including stdout/stderr) after completion.
+ */
+public class DataTransferrer {
+    protected final Logger log = LoggerFactory.getLogger(this.getClass());
+
+    private JobExecutionContext jobContext;
+
+    private StorageClient storageClient;
+
+    public DataTransferrer(JobExecutionContext jobContext, StorageClient storageClient) {
+        this.jobContext = jobContext;
+        this.storageClient = storageClient;
+    }
+
+    /**
+     * Uploads every local ("file" scheme) URI input parameter into the
+     * storage's "input/" directory, overwriting existing files.
+     */
+    public void uploadLocalFiles() throws GFacProviderException {
+        Map<String, Object> inputParams = jobContext.getInMessageContext()
+                .getParameters();
+        for (String paramKey : inputParams.keySet()) {
+            ActualParameter inParam = (ActualParameter) inputParams
+                    .get(paramKey);
+            String paramDataType = inParam.getType().getType().toString();
+            if ("URI".equals(paramDataType)) {
+                String uri = ((URIParameterType) inParam.getType()).getValue();
+                String fileName = new File(uri).getName();
+                if (uri.startsWith("file")) {
+                    try {
+                        // NOTE(review): lastIndexOf("://") + 1 leaves a leading
+                        // "//" in the local path (e.g. "file:///a" -> "//a");
+                        // this appears to rely on File normalization — TODO confirm
+                        String uriWithoutProtocol = uri.substring(
+                                uri.lastIndexOf("://") + 1, uri.length());
+                        FileUploader fileUploader = new FileUploader(
+                                uriWithoutProtocol, "input/" + fileName,
+                                Mode.overwrite);
+                        fileUploader.perform(storageClient);
+                    } catch (FileNotFoundException e3) {
+                        throw new GFacProviderException(
+                                "Error while staging-in, local file " + fileName + " not found", e3);
+                    } catch (Exception e) {
+                        throw new GFacProviderException("Cannot upload files", e);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Downloads all remote files named by the job's output context into the
+     * download location, rewriting each parameter value to the local path.
+     *
+     * @throws GFacProviderException when a download fails or the application
+     *         produced no declared outputs
+     */
+    public void downloadRemoteFiles() throws GFacProviderException {
+
+        String downloadLocation = getDownloadLocation();
+
+        File file = new File(downloadLocation);
+        if (!file.exists()) {
+            file.mkdirs();
+        }
+
+        Map<String, ActualParameter> stringMap = new HashMap<String, ActualParameter>();
+
+        Map<String, Object> outputParams = jobContext.getOutMessageContext()
+                .getParameters();
+
+        for (String paramKey : outputParams.keySet()) {
+
+            ActualParameter outParam = (ActualParameter) outputParams
+                    .get(paramKey);
+
+            String paramDataType = outParam.getType().getType().toString();
+
+            if ("String".equals(paramDataType)) {
+                String stringPrm = ((StringParameterType) outParam
+                        .getType()).getValue();
+                String localFileName = null;
+                //TODO: why analysis.tar? it wont scale to gateways..
+                if (stringPrm == null || stringPrm.isEmpty()) {
+                    localFileName = "analysis-results.tar";
+                } else {
+                    localFileName = stringPrm.substring(stringPrm.lastIndexOf("/") + 1);
+                }
+                String outputLocation = downloadLocation + File.separator + localFileName;
+                FileDownloader fileDownloader = new FileDownloader("output/" + stringPrm, outputLocation, Mode.overwrite);
+                try {
+                    fileDownloader.perform(storageClient);
+                    ((StringParameterType) outParam.getType()).setValue(outputLocation);
+                    stringMap.put(paramKey, outParam);
+                } catch (Exception e) {
+                    throw new GFacProviderException(e.getLocalizedMessage(), e);
+                }
+            }
+
+            else if ("StringArray".equals(paramDataType)) {
+                String[] valueArray = ((StringArrayType) outParam.getType())
+                        .getValueArray();
+                for (String v : valueArray) {
+                    String localFileName = v.substring(v.lastIndexOf("/") + 1);
+                    String outputLocation = downloadLocation + File.separator + localFileName;
+                    FileDownloader fileDownloader = new FileDownloader("output/" + v, outputLocation, Mode.overwrite);
+                    try {
+                        fileDownloader.perform(storageClient);
+                        ((StringParameterType) outParam.getType()).setValue(outputLocation);
+                        stringMap.put(paramKey, outParam);
+                    } catch (Exception e) {
+                        throw new GFacProviderException(e.getLocalizedMessage(), e);
+                    }
+                }
+            }
+        }
+        // stringMap is never null here; only the empty case can occur
+        if (stringMap.isEmpty()) {
+            throw new GFacProviderException("Empty Output returned from the Application, Double check the application " +
+                    "and ApplicationDescriptor output Parameter Names");
+        }
+
+        downloadStdOuts();
+    }
+
+    /**
+     * Downloads stdout and stderr from the storage's "output/" directory and
+     * stores their contents on the application deployment description.
+     */
+    public void downloadStdOuts() throws GFacProviderException {
+        String downloadLocation = getDownloadLocation();
+        File file = new File(downloadLocation);
+        if (!file.exists()) {
+            file.mkdirs();
+        }
+
+        HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) jobContext
+                .getApplicationContext().getApplicationDeploymentDescription()
+                .getType();
+
+        String stdout = appDepType.getStandardOutput();
+        String stderr = appDepType.getStandardError();
+        if (stdout != null) {
+            stdout = stdout.substring(stdout.lastIndexOf('/') + 1);
+        }
+
+        if (stderr != null) {
+            stderr = stderr.substring(stderr.lastIndexOf('/') + 1);
+        }
+
+        String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
+                : stdout;
+        // BUGFIX: the null check must be on stderr, not stdout (copy-paste
+        // error previously caused an NPE when stderr was null but stdout was not)
+        String stderrFileName = (stderr == null || stderr.equals("")) ? "stderr"
+                : stderr;
+
+        ApplicationDescription application = jobContext.getApplicationContext().getApplicationDeploymentDescription();
+        ApplicationDeploymentDescriptionType appDesc = application.getType();
+
+        String stdoutLocation = downloadLocation + File.separator + stdoutFileName;
+        FileDownloader f1 = new FileDownloader("output/" + stdoutFileName, stdoutLocation, Mode.overwrite);
+        try {
+            f1.perform(storageClient);
+            String stdoutput = readFile(stdoutLocation);
+            appDesc.setStandardOutput(stdoutput);
+        } catch (Exception e) {
+            throw new GFacProviderException(e.getLocalizedMessage(), e);
+        }
+        String stderrLocation = downloadLocation + File.separator + stderrFileName;
+        FileDownloader f2 = new FileDownloader("output/" + stderrFileName, stderrLocation, Mode.overwrite);
+        try {
+            f2.perform(storageClient);
+            String stderror = readFile(stderrLocation);
+            appDesc.setStandardError(stderror);
+        } catch (Exception e) {
+            throw new GFacProviderException(e.getLocalizedMessage(), e);
+        }
+    }
+
+    /** Reads a local text file into a string, normalizing line endings to Constants.NEWLINE. */
+    private String readFile(String localFile) throws IOException {
+        // close the reader even when readLine throws (previously leaked on error)
+        BufferedReader instream = new BufferedReader(new FileReader(localFile));
+        try {
+            StringBuffer buff = new StringBuffer();
+            String temp = null;
+            while ((temp = instream.readLine()) != null) {
+                buff.append(temp);
+                buff.append(Constants.NEWLINE);
+            }
+            log.info("finish read file:" + localFile);
+            return buff.toString();
+        } finally {
+            instream.close();
+        }
+    }
+
+    /**
+     * Returns the task's configured output data directory, or null when no
+     * advanced output data handling is configured.
+     * NOTE(review): callers pass this into new File(...) without a null check
+     * — TODO confirm a download location is always configured upstream.
+     */
+    private String getDownloadLocation() {
+        TaskDetails taskData = jobContext.getTaskData();
+        if (taskData != null && taskData.getAdvancedOutputDataHandling() != null) {
+            String outputDataDirectory = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
+            return outputDataDirectory;
+        }
+        return null;
+    }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileDownloader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileDownloader.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileDownloader.java
new file mode 100644
index 0000000..1eabb12
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileDownloader.java
@@ -0,0 +1,256 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Map;
+
+import org.unigrids.services.atomic.types.GridFileType;
+import org.unigrids.services.atomic.types.ProtocolType;
+
+import de.fzj.unicore.uas.client.FileTransferClient;
+import de.fzj.unicore.uas.client.StorageClient;
+import de.fzj.unicore.uas.client.UFTPConstants;
+import de.fzj.unicore.uas.client.UFTPFileTransferClient;
+import de.fzj.unicore.uas.fts.FiletransferOptions.IMonitorable;
+import de.fzj.unicore.uas.fts.FiletransferOptions.SupportsPartialRead;
+
+/**
+ * helper that exports remote files from a UNICORE Storage
+ * to the local client machine.<br/>
+ * Simple wildcards ("*" and "?") and download of
+ * directories are supported.
+ *
+ * TODO this should be refactored so the single-file download logic
+ * is separated from the wildcard/directory/provided outputStream logic
+ *
+ * @author schuller
+ */
+public class FileDownloader extends FileTransferBase{
+
+ private boolean showProgress=true;
+
+ private boolean forceFileOnly=false;
+
+ private OutputStream targetStream=null;
+
+ public FileDownloader(String from, String to, Mode mode){
+ this(from,to,mode,true);
+ }
+
+ public FileDownloader(String from, String to, Mode mode, boolean failOnError){
+ this.to=to;
+ this.from=from;
+ this.mode=mode;
+ this.failOnError=failOnError;
+ }
+
+ public void perform(StorageClient sms)throws Exception{
+ boolean isWildcard=hasWildCards(from);
+ boolean isDirectory=false;
+ GridFileType gridSource=null;
+ if(isWildcard){
+ performWildCardExport(sms);
+ }
+ else {
+ //check if source is a directory
+ gridSource=sms.listProperties(from);
+ isDirectory=gridSource.getIsDirectory();
+ if(isDirectory){
+ if(forceFileOnly){
+ throw new IOException("Source is a directory");
+ }
+ performDirectoryExport(gridSource, new File(to), sms);
+ }
+ else{
+ download(gridSource,new File(to),sms);
+ }
+ }
+ }
+
+ protected void performDirectoryExport(GridFileType directory, File targetDirectory, StorageClient sms)throws Exception{
+ if(!targetDirectory.exists()|| !targetDirectory.canWrite()){
+ throw new IOException("Target directory <"+to+"> does not exist or is not writable!");
+ }
+ if(!targetDirectory.isDirectory()){
+ throw new IOException("Target <"+to+"> is not a directory!");
+ }
+ GridFileType[]gridFiles=sms.listDirectory(directory.getPath());
+ for(GridFileType file: gridFiles){
+ if(file.getIsDirectory()){
+ if(!recurse) {
+ System.out.println("Skipping directory "+file.getPath());
+ continue;
+ }
+ else{
+ File newTargetDirectory=new File(targetDirectory,getName(file.getPath()));
+ boolean success=newTargetDirectory.mkdirs();
+ if(!success)throw new IOException("Can create directory: "+newTargetDirectory.getAbsolutePath());
+ performDirectoryExport(file, newTargetDirectory, sms);
+ continue;
+ }
+ }
+ download(file, new File(targetDirectory,getName(file.getPath())), sms);
+ }
+ }
+
+ protected void performWildCardExport(StorageClient sms)throws Exception{
+ String dir=getDir(from);
+ if(dir==null)dir="/";
+ GridFileType[] files=sms.find(dir, false, from, false, null, null);
+ File targetDir=targetStream==null?new File(to):null;
+ if(targetStream==null){
+ if(!targetDir.isDirectory())throw new IOException("Target is not a directory.");
+ }
+ for(GridFileType f: files){
+ download(f, targetDir, sms);
+ }
+ }
+
+ private String getDir(String path){
+ return new File(path).getParent();
+ }
+
+ private String getName(String path){
+ return new File(path).getName();
+ }
+
+ /**
+ * download a single regular file
+ *
+ * @param source - grid file descriptor
+ * @param localFile - local file or directory to write to
+ * @param sms
+ * @throws Exception
+ */
+ private void download(GridFileType source, File localFile, StorageClient sms)throws Exception{
+ if(source==null || source.getIsDirectory()){
+ throw new IllegalStateException("Source="+source);
+ }
+
+ OutputStream os=targetStream!=null?targetStream:null;
+ FileTransferClient ftc=null;
+ try{
+ String path=source.getPath();
+ if(targetStream==null){
+ if(localFile.isDirectory()){
+ localFile=new File(localFile,getName(source.getPath()));
+ }
+ if(mode.equals(Mode.nooverwrite) && localFile.exists()){
+ System.out.println("File exists and creation mode was set to 'nooverwrite'.");
+ return;
+ }
+ System.out.println("Downloading remote file '"+sms.getUrl()+"#/"+path+"' -> "+localFile.getAbsolutePath());
+ os=new FileOutputStream(localFile.getAbsolutePath(), mode.equals(Mode.append));
+ }
+
+ chosenProtocol=sms.findSupportedProtocol(preferredProtocols.toArray(new ProtocolType.Enum[preferredProtocols.size()]));
+ Map<String,String>extraParameters=makeExtraParameters(chosenProtocol);
+ ftc=sms.getExport(path,extraParameters,chosenProtocol);
+ configure(ftc, extraParameters);
+ System.out.println("DEB:File transfer URL : "+ftc.getUrl());
+// ProgressBar p=null;
+ if(ftc instanceof IMonitorable && showProgress){
+ long size=ftc.getSourceFileSize();
+ if(isRange()){
+ size=getRangeSize();
+ }
+// p=new ProgressBar(localFile.getName(),size,msg);
+// ((IMonitorable) ftc).setProgressListener(p);
+ }
+ long startTime=System.currentTimeMillis();
+ if(isRange()){
+ if(!(ftc instanceof SupportsPartialRead)){
+ throw new Exception("Byte range is defined but protocol does not allow " +
+ "partial read! Please choose a different protocol!");
+ }
+ System.out.println("Byte range: "+startByte+" - "+(getRangeSize()>0?endByte:""));
+ SupportsPartialRead pReader=(SupportsPartialRead)ftc;
+ pReader.readPartial(startByte, endByte-startByte+1, os);
+ }
+ else{
+ ftc.readAllData(os);
+ }
+// if(p!=null){
+// p.finish();
+// }
+ if(timing){
+ long duration=System.currentTimeMillis()-startTime;
+ double rate=(double)localFile.length()/(double)duration;
+ System.out.println("Rate: " +rate+ " kB/sec.");
+ }
+ if(targetStream==null)copyProperties(source, localFile);
+ }
+ finally{
+ try{
+ if(targetStream==null && os!=null){
+ os.close();
+ }
+ }catch(Exception ignored){}
+ if(ftc!=null){
+ try{
+ ftc.destroy();
+ }catch(Exception e1){
+// System.out.println("Could not destroy the filetransfer client",e1);
+ }
+ }
+ }
+ }
+
+ /**
+ * if possible, copy the remote executable flag to the local file
+ * @throws Exception
+ */
+ private void copyProperties(GridFileType source, File localFile)throws Exception{
+ try{
+ localFile.setExecutable(source.getPermissions().getExecutable());
+ }
+ catch(Exception ex){
+ //TODO: logging
+// ("Can't set 'executable' flag for "+localFile.getName(), ex);
+ }
+ }
+
+ private void configure(FileTransferClient ftc, Map<String,String>params){
+ if(ftc instanceof UFTPFileTransferClient){
+ UFTPFileTransferClient u=(UFTPFileTransferClient)ftc;
+ String secret=params.get(UFTPConstants.PARAM_SECRET);
+ u.setSecret(secret);
+ }
+ }
+
+ public void setShowProgress(boolean showProgress) {
+ this.showProgress = showProgress;
+ }
+
+ public void setForceFileOnly(boolean forceFileOnly) {
+ this.forceFileOnly = forceFileOnly;
+ }
+
+ public void setTargetStream(OutputStream targetStream) {
+ this.targetStream = targetStream;
+ }
+
+}
[03/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/external/GridFtp.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
new file mode 100644
index 0000000..5be087e
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/external/GridFtp.java
@@ -0,0 +1,558 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.external;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.Vector;
+
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.ToolsException;
+import org.apache.airavata.gfac.util.GramProviderUtils;
+import org.apache.airavata.gfac.util.GridFTPContactInfo;
+import org.globus.ftp.DataChannelAuthentication;
+import org.globus.ftp.DataSourceStream;
+import org.globus.ftp.FileInfo;
+import org.globus.ftp.GridFTPClient;
+import org.globus.ftp.HostPort;
+import org.globus.ftp.Marker;
+import org.globus.ftp.MarkerListener;
+import org.globus.ftp.MlsxEntry;
+import org.globus.ftp.Session;
+import org.globus.ftp.exception.ClientException;
+import org.globus.ftp.exception.ServerException;
+import org.globus.gsi.gssapi.auth.HostAuthorization;
+import org.ietf.jgss.GSSCredential;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * GridFTP tools
+ */
+public class GridFtp {
+ public static final Logger log = LoggerFactory.getLogger(GridFtp.class);
+
+ public static final String GSIFTP_SCHEME = "gsiftp";
+ public static final String HOST = "host";
+
+    /**
+     * Creates a directory at the remote GridFTP location if it does not
+     * already exist, retrying up to three times on server/IO errors with a
+     * 10-second pause between attempts.
+     *
+     * @param destURI gsiftp URI of the directory to create
+     * @param gssCred credential used to authenticate to the server
+     * @throws ToolsException on connection failure, after exhausting retries,
+     *         or when the retry sleep is interrupted
+     */
+    public void makeDir(URI destURI, GSSCredential gssCred) throws ToolsException {
+        GridFTPClient destClient = null;
+        GridFTPContactInfo destHost = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
+        try {
+
+            String destPath = destURI.getPath();
+            log.info(("Creating Directory = " + destHost + "=" + destPath));
+
+            destClient = new GridFTPClient(destHost.hostName, destHost.port);
+
+            int tryCount = 0;
+            while (true) {
+                try {
+                    destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+                    destClient.authenticate(gssCred);
+                    destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+
+                    if (!destClient.exists(destPath)) {
+                        destClient.makeDir(destPath);
+                    }
+                    break;
+                } catch (ServerException e) {
+                    tryCount++;
+                    if (tryCount >= 3) {
+                        throw new ToolsException(e.getMessage(), e);
+                    }
+                    Thread.sleep(10000);
+                } catch (IOException e) {
+                    tryCount++;
+                    if (tryCount >= 3) {
+                        throw new ToolsException(e.getMessage(), e);
+                    }
+                    Thread.sleep(10000);
+                }
+            }
+        } catch (ServerException e) {
+            throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
+        } catch (IOException e) {
+            throw new ToolsException("Cannot Create GridFTP Client to:" + destHost.toString(), e);
+        } catch (InterruptedException e) {
+            // restore the interrupt status before surfacing the failure,
+            // so callers further up can still observe the interruption
+            Thread.currentThread().interrupt();
+            throw new ToolsException("Internal Error cannot sleep", e);
+        } finally {
+            if (destClient != null) {
+                try {
+                    destClient.close();
+                } catch (Exception e) {
+                    log.warn("Cannot close GridFTP client connection",e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Upload file from stream
+     *
+     * Streams the given InputStream to the remote GridFTP location, switching
+     * to binary (image) mode for known binary file extensions.
+     *
+     * @param destURI destination gsiftp URI (path names the remote file)
+     * @param gsCredential credential used to authenticate to the server
+     * @param io source stream; not closed by this method
+     * @throws ToolsException on any connection, authentication or transfer failure
+     */
+    public void uploadFile(URI destURI, GSSCredential gsCredential, InputStream io) throws ToolsException {
+        GridFTPClient ftpClient = null;
+        GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
+
+        try {
+
+            String remoteFile = destURI.getPath();
+            log.info("The remote file is " + remoteFile);
+
+            log.debug("Setup GridFTP Client");
+
+            ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
+            ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+            ftpClient.authenticate(gsCredential);
+            ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+
+            log.info("Uploading file");
+            if (checkBinaryExtensions(remoteFile)) {
+                // image mode prevents newline translation corrupting binary data
+                log.debug("Transfer mode is set to Binary for a file upload");
+                ftpClient.setType(Session.TYPE_IMAGE);
+            }
+
+            // no-op marker listener: restart markers are ignored
+            ftpClient.put(remoteFile, new DataSourceStream(io), new MarkerListener() {
+                public void markerArrived(Marker marker) {
+                }
+            });
+
+            log.info("Upload file to:" + remoteFile + " is done");
+
+        } catch (ServerException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+        } catch (IOException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+        } catch (ClientException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+        } finally {
+            if (ftpClient != null) {
+                try {
+                    ftpClient.close();
+                } catch (Exception e) {
+                    log.warn("Cannot close GridFTP client connection",e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Third-party transfer: copies the file at srcURI on one GridFTP server to
+     * destURI on another, authenticating both ends with the same credential.
+     *
+     * @param srcURI source gsiftp URI
+     * @param destURI destination gsiftp URI
+     * @param gsCredential credential used for both servers
+     * @throws ToolsException on any connection, authentication or transfer failure
+     */
+    public void uploadFile(URI srcURI, URI destURI, GSSCredential gsCredential) throws ToolsException {
+        GridFTPClient srcClient = null;
+        GridFTPClient destClient = null;
+        GridFTPContactInfo destContactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
+        GridFTPContactInfo srcContactInfo = new GridFTPContactInfo(srcURI.getHost(),srcURI.getPort());
+        try {
+            String remoteFile = destURI.getPath();
+            log.info("The remote file is " + remoteFile);
+            log.debug("Setup GridFTP Client");
+            srcClient = new GridFTPClient(srcContactInfo.hostName, srcContactInfo.port);
+            srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+            srcClient.authenticate(gsCredential);
+            srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+
+            destClient = new GridFTPClient(destContactInfo.hostName, destContactInfo.port);
+            destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+            destClient.authenticate(gsCredential);
+            destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+            log.debug("Uploading file");
+            if (checkBinaryExtensions(remoteFile)) {
+                // image mode prevents newline translation corrupting binary data
+                log.debug("Transfer mode is set to Binary for a file upload");
+                srcClient.setType(Session.TYPE_IMAGE);
+            }
+
+            srcClient.transfer(srcURI.getPath(),destClient, remoteFile, false, null);
+
+            log.info("Upload file to:" + remoteFile + " is done");
+
+        } catch (ServerException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
+        } catch (IOException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
+        } catch (ClientException e) {
+            throw new ToolsException("Cannot upload file to GridFTP:" + destContactInfo.toString(), e);
+        } finally {
+            if (srcClient != null) {
+                try {
+                    srcClient.close();
+                } catch (Exception e) {
+                    log.warn("Cannot close GridFTP client connection",e);
+                }
+            }
+            // BUGFIX: destClient was previously never closed, leaking the
+            // destination control connection on every transfer
+            if (destClient != null) {
+                try {
+                    destClient.close();
+                } catch (Exception e) {
+                    log.warn("Cannot close GridFTP client connection",e);
+                }
+            }
+        }
+    }
+
+ /**
+ * Upload file to remote location
+ *
+ * @param destURI
+ * @param gsCredential
+ * @param localFile
+ * @throws GFacException
+ */
+ public void uploadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
+ GridFTPClient ftpClient = null;
+ GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
+ try {
+
+ String remoteFile = destURI.getPath();
+
+ log.info("The local temp file is " + localFile);
+ log.info("the remote file is " + remoteFile);
+
+ log.debug("Setup GridFTP Client");
+
+ ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
+ ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+ ftpClient.authenticate(gsCredential);
+ ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+
+ log.debug("Uploading file");
+ if (checkBinaryExtensions(remoteFile)) {
+ log.debug("Transfer mode is set to Binary for a file upload");
+ ftpClient.setType(Session.TYPE_IMAGE);
+ }
+
+
+ ftpClient.put(localFile, remoteFile, false);
+
+ log.info("Upload file to:" + remoteFile + " is done");
+
+ } catch (ServerException e) {
+ throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+ } catch (IOException e) {
+ throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+ } catch (ClientException e) {
+ throw new ToolsException("Cannot upload file to GridFTP:" + contactInfo.toString(), e);
+ } finally {
+ if (ftpClient != null) {
+ try {
+ ftpClient.close();
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection",e);
+ }
+ }
+ }
+ }
+
+ /**
+ * Download File from remote location
+ *
+ * @param destURI
+ * @param gsCredential
+ * @param localFile
+ * @throws GFacException
+ */
+ public void downloadFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
+ GridFTPClient ftpClient = null;
+ GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
+ try {
+ String remoteFile = destURI.getPath();
+
+ log.info("The local temp file is " + localFile);
+ log.info("the remote file is " + remoteFile);
+
+ log.debug("Setup GridFTP Client");
+
+ ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
+ ftpClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+ ftpClient.authenticate(gsCredential);
+ ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+
+ log.debug("Downloading file");
+ if (checkBinaryExtensions(remoteFile)) {
+ log.debug("Transfer mode is set to Binary to download a file");
+ ftpClient.setType(Session.TYPE_IMAGE);
+ }
+
+ ftpClient.get(remoteFile, localFile);
+
+ log.info("Download file to:" + localFile + " is done");
+
+ } catch (ServerException e) {
+ throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
+ } catch (IOException e) {
+ throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
+ } catch (ClientException e) {
+ throw new ToolsException("Cannot download file from GridFTP:" + contactInfo.toString(), e);
+ } finally {
+ if (ftpClient != null) {
+ try {
+ //ftpClient.close();
+ ftpClient.close(false);
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection",e);
+ }
+ }
+ }
+ }
+
+ /**
+ * Stream remote file
+ *
+ * @param destURI
+ * @param gsCredential
+ * @param localFile
+ * @return
+ * @throws GFacException
+ */
+ public String readRemoteFile(URI destURI, GSSCredential gsCredential, File localFile) throws ToolsException {
+ BufferedReader instream = null;
+ File localTempfile = null;
+ try {
+
+ if (localFile == null) {
+ localTempfile = File.createTempFile("stderr", "err");
+ } else {
+ localTempfile = localFile;
+ }
+
+ log.info("Local temporary file:" + localTempfile);
+
+ downloadFile(destURI, gsCredential, localTempfile);
+
+ instream = new BufferedReader(new FileReader(localTempfile));
+ StringBuffer buff = new StringBuffer();
+ String temp = null;
+ while ((temp = instream.readLine()) != null) {
+ buff.append(temp);
+ buff.append(Constants.NEWLINE);
+ }
+
+ log.info("finish read file:" + localTempfile);
+
+ return buff.toString();
+ } catch (FileNotFoundException e) {
+ throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
+ } catch (IOException e) {
+ throw new ToolsException("Cannot read localfile file:" + localTempfile, e);
+ } finally {
+ if (instream != null) {
+ try {
+ instream.close();
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection",e);
+ }
+ }
+ }
+ }
+
+ /**
+ * Transfer data from one GridFTp Endpoint to another GridFTP Endpoint
+ *
+ * @param srchost
+ * @param desthost
+ * @param gssCred
+ * @param srcActive
+ * @throws ServerException
+ * @throws ClientException
+ * @throws IOException
+ */
+ public void transfer(URI srchost, URI desthost, GSSCredential gssCred, boolean srcActive) throws ToolsException {
+ GridFTPClient destClient = null;
+ GridFTPClient srcClient = null;
+
+ try {
+ destClient = new GridFTPClient(desthost.getHost(), desthost.getPort());
+ destClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+ destClient.authenticate(gssCred);
+
+ if (checkBinaryExtensions(desthost.getPath())) {
+ log.debug("Transfer mode is set to Binary");
+ destClient.setType(Session.TYPE_IMAGE);
+ }
+
+ srcClient = new GridFTPClient(srchost.getHost(), srchost.getPort());
+ srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+ srcClient.authenticate(gssCred);
+
+ if (checkBinaryExtensions(srchost.getPath())) {
+ log.debug("Transfer mode is set to Binary");
+ srcClient.setType(Session.TYPE_IMAGE);
+ }
+
+ if (srcActive) {
+ log.debug("Set src active");
+ HostPort hp = destClient.setPassive();
+ srcClient.setActive(hp);
+ } else {
+ log.debug("Set dst active");
+ HostPort hp = srcClient.setPassive();
+ destClient.setActive(hp);
+ }
+
+ log.debug("Start transfer file from GridFTP:" + srchost.toString() + " to " + desthost.toString());
+
+ /**
+ * Transfer a file. The transfer() function blocks until the transfer is complete.
+ */
+ srcClient.transfer(srchost.getPath(), destClient, desthost.getPath(), false, null);
+ if (srcClient.getSize(srchost.getPath()) == destClient.getSize(desthost.getPath())) {
+ log.debug("CHECK SUM OK");
+ } else {
+ log.debug("****CHECK SUM FAILED****");
+ }
+
+ } catch (ServerException e) {
+ throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
+ + desthost.toString(), e);
+ } catch (IOException e) {
+ throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
+ + desthost.toString(), e);
+ } catch (ClientException e) {
+ throw new ToolsException("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
+ + desthost.toString(), e);
+ } finally {
+ if (destClient != null) {
+ try {
+ destClient.close();
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection at Desitnation:" + desthost.toString());
+ }
+ }
+ if (srcClient != null) {
+ try {
+ srcClient.close();
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection at Source:" + srchost.toString(),e);
+ }
+ }
+ }
+ }
+
+ /**
+ * List files in a GridFTP directory
+ * @param dirURI
+ * @param gssCred
+ * @return
+ * @throws ToolsException
+ */
+ @SuppressWarnings("unchecked")
+ public List<String> listDir(URI dirURI, GSSCredential gssCred) throws ToolsException {
+ List<String> files = new ArrayList<String>();
+ GridFTPClient srcClient = null;
+ try {
+ GridFTPContactInfo contactInfo = new GridFTPContactInfo(dirURI.getHost(), dirURI.getPort());
+
+ srcClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
+ srcClient.setAuthorization(new HostAuthorization(GridFtp.HOST));
+ srcClient.authenticate(gssCred);
+ srcClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
+ srcClient.setType(Session.TYPE_ASCII);
+ srcClient.changeDir(dirURI.getPath());
+
+ Vector<Object> fileInfo = null;
+ try {
+ fileInfo = srcClient.mlsd();
+ } catch (Throwable e) {
+ fileInfo = srcClient.list();
+ }
+
+ if (!fileInfo.isEmpty()) {
+ for (int j = 0; j < fileInfo.size(); ++j) {
+ String name = null;
+ if (fileInfo.get(j) instanceof MlsxEntry) {
+ name = ((MlsxEntry) fileInfo.get(j)).getFileName();
+ } else if (fileInfo.get(j) instanceof FileInfo) {
+ name = ((FileInfo) fileInfo.get(j)).getName();
+ } else {
+ throw new ToolsException("Unsupported type returned by gridftp " + fileInfo.get(j));
+ }
+
+ if (!name.equals(".") && !name.equals("..")) {
+ URI uri = GramProviderUtils.createGsiftpURI(contactInfo.hostName, dirURI.getPath() + File.separator + name);
+ files.add(uri.getPath());
+ }
+ }
+ }
+ return files;
+ } catch (IOException e) {
+ throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
+ } catch (ServerException e) {
+ throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
+ } catch (ClientException e) {
+ throw new ToolsException("Could not list directory: " + dirURI.toString() ,e);
+ } catch (URISyntaxException e) {
+ throw new ToolsException("Error creating URL of listed files: " + dirURI.toString() ,e);
+ } finally {
+ if (srcClient != null) {
+ try {
+ srcClient.close();
+ } catch (Exception e) {
+ log.warn("Cannot close GridFTP client connection", e);
+ }
+ }
+ }
+ }
+ /**
+ * Method to check file extension as binary to set transfer type
+ * @param filePath
+ * @return
+ */
+ private static boolean checkBinaryExtensions(String filePath){
+ String extension = filePath.substring(filePath.lastIndexOf(".")+1,filePath.length());
+ Set<String> extensions = new HashSet<String>(Arrays.asList(new String[] {"tar","zip","gz","tgz"}));
+ if(extensions.contains(extension)){
+ return true;
+ }else{
+ return false;
+ }
+
+ }
+
+
+
+
+ public String gridFTPFileExist(URI inputDirectory,String fileName,GSSCredential gssCred) throws ToolsException {
+ List<String> strings = listDir(inputDirectory, gssCred);
+ for(String fileExist:strings){
+ if(fileName.equals(fileExist)) {
+ fileName = "duplicate_" + fileName;
+ return fileName;
+ }
+ }
+ return fileName;
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
new file mode 100644
index 0000000..feadd72
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GramDirectorySetupHandler.java
@@ -0,0 +1,135 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.handler;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.external.GridFtp;
+import org.apache.airavata.gfac.util.GramProviderUtils;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
+import org.apache.airavata.registry.cpi.ChildDataType;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.GlobusHostType;
+import org.apache.airavata.schemas.gfac.HostDescriptionType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.ietf.jgss.GSSCredential;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GramDirectorySetupHandler extends AbstractHandler {
+ private static final Logger log = LoggerFactory.getLogger(GramDirectorySetupHandler.class);
+
+ public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
+ log.info("Invoking GramDirectorySetupHandler ...");
+ super.invoke(jobExecutionContext);
+ String[] gridFTPEndpointArray = null;
+
+ //TODO: why it is tightly coupled with gridftp
+// GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
+
+ //TODO: make it more reusable
+ HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
+
+
+
+ if(hostType instanceof GlobusHostType){
+ gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
+ }
+ else if (hostType instanceof UnicoreHostType){
+ gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
+ }
+
+
+
+ ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+ ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
+ GridFtp ftp = new GridFtp();
+
+ try {
+
+ GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
+ getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
+
+ if (gridFTPEndpointArray == null || gridFTPEndpointArray.length == 0) {
+ gridFTPEndpointArray = new String[]{hostType.getHostAddress()};
+ }
+ boolean success = false;
+ GFacHandlerException pe = null;// = new ProviderException("");
+ for (String endpoint : gridFTPEndpointArray) {
+ try {
+
+ URI tmpdirURI = GramProviderUtils.createGsiftpURI(endpoint, app.getScratchWorkingDirectory());
+ URI workingDirURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStaticWorkingDirectory());
+ URI inputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
+ URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
+
+ log.info("Host FTP = " + gridFTPEndpointArray[0]);
+ log.info("temp directory = " + tmpdirURI);
+ log.info("Working directory = " + workingDirURI);
+ log.info("Input directory = " + inputURI);
+ log.info("Output directory = " + outputURI);
+ ftp.makeDir(tmpdirURI, gssCred);
+ ftp.makeDir(workingDirURI, gssCred);
+ ftp.makeDir(inputURI, gssCred);
+ ftp.makeDir(outputURI, gssCred);
+ success = true;
+ DataTransferDetails detail = new DataTransferDetails();
+ TransferStatus status = new TransferStatus();
+ status.setTransferState(TransferState.DIRECTORY_SETUP);
+ detail.setTransferStatus(status);
+ detail.setTransferDescription("Working directory = " + workingDirURI);
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+
+ break;
+ } catch (URISyntaxException e) {
+ pe = new GFacHandlerException("URI is malformatted:" + e.getMessage(), e);
+
+ } catch (Exception e) {
+ pe = new GFacHandlerException(e.getMessage(), e);
+ }
+ }
+ if (success == false) {
+ GFacUtils.saveErrorDetails(pe.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
+ throw pe;
+ }
+ } catch (SecurityException e) {
+ throw new GFacHandlerException(e.getMessage(), e);
+ } catch (ApplicationSettingsException e1) {
+ throw new GFacHandlerException(e1.getMessage(), e1);
+ }
+ }
+
+ public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
new file mode 100644
index 0000000..4b1beab
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPInputHandler.java
@@ -0,0 +1,204 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.handler;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.StringUtil;
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.MappingFactory;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.ToolsException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.external.GridFtp;
+import org.apache.airavata.gfac.util.GramProviderUtils;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
+import org.apache.airavata.registry.cpi.ChildDataType;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.GlobusHostType;
+import org.apache.airavata.schemas.gfac.HostDescriptionType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.ietf.jgss.GSSCredential;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GridFTPInputHandler extends AbstractHandler {
+ private static final Logger log = LoggerFactory.getLogger(AppDescriptorCheckHandler.class);
+
+ public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
+ log.info("Invoking GridFTPInputHandler ...");
+ super.invoke(jobExecutionContext);
+ DataTransferDetails detail = new DataTransferDetails();
+ TransferStatus status = new TransferStatus();
+
+ MessageContext inputNew = new MessageContext();
+ try {
+ MessageContext input = jobExecutionContext.getInMessageContext();
+ Set<String> parameters = input.getParameters().keySet();
+ for (String paramName : parameters) {
+ ActualParameter actualParameter = (ActualParameter) input.getParameters().get(paramName);
+ String paramValue = MappingFactory.toString(actualParameter);
+ //TODO: Review this with type
+ if ("URI".equals(actualParameter.getType().getType().toString())) {
+ ((URIParameterType) actualParameter.getType()).setValue(stageInputFiles(jobExecutionContext, paramValue));
+ } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+ List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
+ List<String> newFiles = new ArrayList<String>();
+ for (String paramValueEach : split) {
+ String stageInputFiles = stageInputFiles(jobExecutionContext, paramValueEach);
+ detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
+ status.setTransferState(TransferState.UPLOAD);
+ detail.setTransferStatus(status);
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+
+ newFiles.add(stageInputFiles);
+ }
+ ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+ }
+ inputNew.getParameters().put(paramName, actualParameter);
+
+ }
+ } catch (Exception e) {
+ try {
+ status.setTransferState(TransferState.FAILED);
+ detail.setTransferStatus(status);
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
+ } catch (Exception e1) {
+ throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+ }
+ log.error(e.getMessage());
+ throw new GFacHandlerException("Error while input File Staging", e, e.getLocalizedMessage());
+ }
+ jobExecutionContext.setInMessageContext(inputNew);
+ }
+
+ private static String stageInputFiles(JobExecutionContext jobExecutionContext, String paramValue) throws URISyntaxException, SecurityException, ToolsException, IOException,GFacException, ApplicationSettingsException {
+ URI gridftpURL = new URI(paramValue);
+
+ String[] gridFTPEndpointArray = null;
+
+ // not to download input files to the input dir if its http / gsiftp
+ // but if local then yes
+ boolean isInputNonLocal = true;
+
+ //TODO: why it is tightly coupled with gridftp
+// GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType();
+
+ //TODO: make it more reusable
+ HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
+
+ if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
+ gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
+ }
+ else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
+ gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
+ isInputNonLocal = false;
+ }
+ else {
+ //TODO
+ }
+
+
+ ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
+ GridFtp ftp = new GridFtp();
+ URI destURI = null;
+ GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
+
+ for (String endpoint : gridFTPEndpointArray) {
+ URI inputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getInputDataDirectory());
+ String fileName = new File(gridftpURL.getPath()).getName();
+ fileName = ftp.gridFTPFileExist(inputURI, fileName,gssCred);
+
+ String destLocalPath = inputURI.getPath() + File.separator + fileName;
+ //if user give a url just to refer an endpoint, not a web resource we are not doing any transfer
+ if (fileName != null && !"".equals(fileName)) {
+ destURI = GramProviderUtils.createGsiftpURI(endpoint, destLocalPath);
+ if (paramValue.startsWith("gsiftp")) {
+ // no need to do if it is unicore, as unicore will download this on user's behalf to the job space dir
+ if(isInputNonLocal) ftp.uploadFile(gridftpURL, destURI, gssCred);
+ else return paramValue;
+ } else if (paramValue.startsWith("file")) {
+ String localFile = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
+ FileInputStream fis = null;
+ try {
+ fis = new FileInputStream(localFile);
+ ftp.uploadFile(destURI, gssCred, fis);
+ } catch (IOException e) {
+ throw new GFacException("Unable to create file : " + localFile ,e);
+ } finally {
+ if (fis != null) {
+ fis.close();
+ }
+ }
+ } else if (paramValue.startsWith("http")) {
+ // no need to do if it is unicore
+ if(isInputNonLocal) {
+ InputStream is = null;
+ try {
+ is = gridftpURL.toURL().openStream();
+ ftp.uploadFile(destURI, gssCred, (is));
+ }finally {
+ is.close();
+ }
+ } else {
+ // don't return destUri
+ return paramValue;
+ }
+
+ } else {
+ //todo throw exception telling unsupported protocol
+ return paramValue;
+ }
+ } else {
+ // When the given input is not a web resource but a URI type input, then we don't do any transfer just keep the same value as it isin the input
+ return paramValue;
+ }
+ }
+ return destURI.getPath();
+ }
+
+ public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
new file mode 100644
index 0000000..e0cb0f8
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/handler/GridFTPOutputHandler.java
@@ -0,0 +1,347 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.handler;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.StringUtil;
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.commons.gfac.type.MappingFactory;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.ToolsException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.external.GridFtp;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.util.GramProviderUtils;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.gfac.utils.OutputUtils;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
+import org.apache.airavata.registry.cpi.ChildDataType;
+import org.apache.airavata.registry.cpi.Registry;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.GlobusHostType;
+import org.apache.airavata.schemas.gfac.HostDescriptionType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.ietf.jgss.GSSCredential;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class GridFTPOutputHandler extends AbstractHandler {
+ private static final Logger log = LoggerFactory.getLogger(GridFTPOutputHandler.class);
+ private Registry registry;
+
+
+ public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException,GFacException {
+ log.info("Invoking GridFTPOutputHandler ...");
+ super.invoke(jobExecutionContext);
+
+ ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
+
+ HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
+ String[] gridFTPEndpointArray = null;
+ String hostName = null;
+
+ if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
+ gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
+ hostName = ((GlobusHostType) hostType).getHostName();
+
+ }
+ else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
+ gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
+ hostName = ((UnicoreHostType) hostType).getHostName();
+ }
+ else {
+ //TODO
+ }
+
+ GridFtp ftp = new GridFtp();
+ File localStdErrFile = null;
+ Map<String, ActualParameter> stringMap = new HashMap<String, ActualParameter>();
+ DataTransferDetails detail = new DataTransferDetails();
+ TransferStatus status = new TransferStatus();
+
+ try {
+ GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
+ String[] hostgridFTP = gridFTPEndpointArray;
+ if (hostgridFTP == null || hostgridFTP.length == 0) {
+ hostgridFTP = new String[]{hostName};
+ }
+ for (String endpoint : gridFTPEndpointArray) {
+ try {
+ /*
+ * Read Stdout and Stderror
+ */
+ URI stdoutURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStandardOutput());
+ URI stderrURI = GramProviderUtils.createGsiftpURI(endpoint, app.getStandardError());
+ status.setTransferState(TransferState.COMPLETE);
+ detail.setTransferStatus(status);
+ detail.setTransferDescription("STDOUT:" + stdoutURI.toString());
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+ status.setTransferState(TransferState.COMPLETE);
+ detail.setTransferStatus(status);
+ detail.setTransferDescription("STDERR:" + stderrURI.toString());
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+
+ log.info("STDOUT:" + stdoutURI.toString());
+ log.info("STDERR:" + stderrURI.toString());
+
+ File logDir = new File("./service_logs");
+ if (!logDir.exists()) {
+ logDir.mkdir();
+ }
+
+ String timeStampedServiceName = GFacUtils.createUniqueNameForService(jobExecutionContext
+ .getServiceName());
+ File localStdOutFile = File.createTempFile(timeStampedServiceName, "stdout");
+ localStdErrFile = File.createTempFile(timeStampedServiceName, "stderr");
+
+
+ String stdout = null;
+ String stderr = null;
+
+ // TODO: what if job is failed
+ // and this handler is not able to find std* files?
+ try {
+ stdout = ftp.readRemoteFile(stdoutURI, gssCred, localStdOutFile);
+ stderr = ftp.readRemoteFile(stderrURI, gssCred, localStdErrFile);
+ //TODO: do we also need to set them as output parameters for another job
+ ApplicationDescription application = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+ ApplicationDeploymentDescriptionType appDesc = application.getType();
+ appDesc.setStandardOutput(stdout);
+ appDesc.setStandardError(stderr);
+ jobExecutionContext.getApplicationContext().setApplicationDeploymentDescription(application);
+ }
+ catch(ToolsException e) {
+ log.error("Cannot download stdout/err files. One reason could be the job is not successfully finished: "+e.getMessage());
+ }
+
+
+ Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
+ Set<String> keys = output.keySet();
+ for (String paramName : keys) {
+ ActualParameter actualParameter = (ActualParameter) output.get(paramName);
+ if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+ URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
+ List<String> outputList = ftp.listDir(outputURI, gssCred);
+ String[] valueList = outputList.toArray(new String[outputList.size()]);
+ ((URIArrayType) actualParameter.getType()).setValueArray(valueList);
+ // why to instantiate new instance?
+// stringMap = new HashMap<String, ActualParameter>();
+ stringMap.put(paramName, actualParameter);
+ }else if ("StringArray".equals(actualParameter.getType().getType().toString())) {
+ String[] valueList = OutputUtils.parseStdoutArray(stdout, paramName);
+ ((StringArrayType) actualParameter.getType()).setValueArray(valueList);
+// stringMap = new HashMap<String, ActualParameter>();
+ stringMap.put(paramName, actualParameter);
+ } else if ("URI".equals(actualParameter.getType().getType().toString())) {
+ URI outputURI = GramProviderUtils.createGsiftpURI(endpoint, app.getOutputDataDirectory());
+ List<String> outputList = ftp.listDir(outputURI, gssCred);
+ if (outputList.size() == 0 || outputList.get(0).isEmpty()) {
+ stringMap = OutputUtils.fillOutputFromStdout(output, stdout, stderr);
+ } else {
+ String valueList = outputList.get(0);
+ ((URIParameterType) actualParameter.getType()).setValue(valueList);
+ stringMap = new HashMap<String, ActualParameter>();
+ stringMap.put(paramName, actualParameter);
+ }
+ }
+ else {
+ // This is to handle exception during the output parsing.
+ stringMap = OutputUtils.fillOutputFromStdout(output, stdout, stderr);
+ }
+ status.setTransferState(TransferState.DOWNLOAD);
+ detail.setTransferStatus(status);
+ detail.setTransferDescription("Output: " + stringMap.get(paramName).toString());
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+
+ }
+ if (stringMap == null || stringMap.isEmpty()) {
+ throw new GFacHandlerException("Empty Output returned from the Application, Double check the application" +
+ "and ApplicationDescriptor output Parameter Names");
+ }
+ // If users has given an output Data path to download the output files this will download the file on machine where GFac is installed
+ TaskDetails taskData = jobExecutionContext.getTaskData();
+ if(taskData != null && taskData.getAdvancedOutputDataHandling() != null){
+ String outputDataDirectory = taskData.getAdvancedOutputDataHandling().getOutputDataDir();
+ if(outputDataDirectory != null && !"".equals(outputDataDirectory)){
+ stageOutputFiles(jobExecutionContext,outputDataDirectory);
+ }
+ }
+ } catch (ToolsException e) {
+ log.error(e.getMessage());
+ throw new GFacHandlerException(e.getMessage() + "\n StdError Data: \n" +readLastLinesofStdOut(localStdErrFile.getPath(), 20),e);
+ } catch (URISyntaxException e) {
+ log.error(e.getMessage());
+ throw new GFacHandlerException("URI is malformatted:" + e.getMessage(), e, readLastLinesofStdOut(localStdErrFile.getPath(), 20));
+ }
+ }
+ } catch (Exception e) {
+ try {
+ status.setTransferState(TransferState.FAILED);
+ detail.setTransferStatus(status);
+ registry.add(ChildDataType.DATA_TRANSFER_DETAIL,detail, jobExecutionContext.getTaskData().getTaskID());
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE, jobExecutionContext.getTaskData().getTaskID());
+ } catch (Exception e1) {
+ throw new GFacHandlerException("Error persisting status", e1, e1.getLocalizedMessage());
+ }
+ log.error(e.getMessage());
+ throw new GFacHandlerException(e.getMessage(), e, readLastLinesofStdOut(localStdErrFile.getPath(), 20));
+ }
+
+ }
+
+ private static String readLastLinesofStdOut(String path, int count) {
+ StringBuffer buffer = new StringBuffer();
+ FileInputStream in = null;
+ try {
+ in = new FileInputStream(path);
+ } catch (FileNotFoundException e) {
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ BufferedReader br = new BufferedReader(new InputStreamReader(in));
+ List<String> strLine = new ArrayList<String>();
+ String tmp = null;
+ int numberofLines = 0;
+ try {
+ while ((tmp = br.readLine()) != null) {
+ strLine.add(tmp);
+ numberofLines++;
+ }
+ } catch (IOException e) {
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ if (numberofLines > count) {
+ for (int i = numberofLines - count; i < numberofLines; i++) {
+ buffer.append(strLine.get(i));
+ buffer.append("\n");
+ }
+ } else {
+ for (int i = 0; i < numberofLines; i++) {
+ buffer.append(strLine.get(i));
+ buffer.append("\n");
+ }
+ }
+ try {
+ in.close();
+ } catch (IOException e) {
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ return buffer.toString();
+ }
+
+ private static void stageOutputFiles(JobExecutionContext jobExecutionContext, String outputFileStagingPath) throws GFacProviderException,GFacException, ApplicationSettingsException {
+
+
+ HostDescriptionType hostType = jobExecutionContext.getApplicationContext().getHostDescription().getType();
+ String[] gridFTPEndpointArray = null;
+
+ if(jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GlobusHostType){
+ gridFTPEndpointArray = ((GlobusHostType) hostType).getGridFTPEndPointArray();
+ }
+ else if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType){
+ gridFTPEndpointArray = ((UnicoreHostType) hostType).getGridFTPEndPointArray();
+ }
+ else {
+ //TODO
+ }
+
+
+ MessageContext outputNew = new MessageContext();
+ MessageContext output = jobExecutionContext.getOutMessageContext();
+ Map<String, Object> parameters = output.getParameters();
+ for (String paramName : parameters.keySet()) {
+ ActualParameter actualParameter = (ActualParameter) parameters
+ .get(paramName);
+
+ GridFtp ftp = new GridFtp();
+ GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
+ try {
+ if ("URI".equals(actualParameter.getType().getType().toString())) {
+ for (String endpoint : gridFTPEndpointArray) {
+ ((URIParameterType) actualParameter.getType()).setValue(doStaging(outputFileStagingPath,
+ MappingFactory.toString(actualParameter), ftp, gssCred, endpoint));
+ }
+ } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
+ List<String> split = Arrays.asList(StringUtil.getElementsFromString(MappingFactory.toString(actualParameter)));
+ List<String> newFiles = new ArrayList<String>();
+ for (String endpoint : gridFTPEndpointArray) {
+ for (String paramValueEach : split) {
+ newFiles.add(doStaging(outputFileStagingPath, paramValueEach, ftp, gssCred, endpoint));
+ }
+ ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+ }
+
+ }
+ } catch (URISyntaxException e) {
+ log.error(e.getMessage());
+ throw new GFacProviderException(e.getMessage(), e);
+ } catch (ToolsException e) {
+ log.error(e.getMessage());
+ throw new GFacProviderException(e.getMessage(), e);
+ }
+ outputNew.getParameters().put(paramName, actualParameter);
+ }
+ jobExecutionContext.setOutMessageContext(outputNew);
+ }
+
+ private static String doStaging(String outputFileStagingPath, String paramValue, GridFtp ftp, GSSCredential gssCred, String endpoint) throws URISyntaxException, ToolsException {
+ URI srcURI = GramProviderUtils.createGsiftpURI(endpoint, paramValue);
+ String fileName = new File(srcURI.getPath()).getName();
+ File outputpath = new File(outputFileStagingPath);
+ if(!outputpath.exists()){
+ outputpath.mkdirs();
+ }
+ File outputFile = new File(outputpath.getAbsolutePath() + File.separator + fileName);
+ ftp.readRemoteFile(srcURI,
+ gssCred, outputFile);
+ return outputFileStagingPath + File.separator + fileName;
+ }
+
+ public void initProperties(Map<String, String> properties) throws GFacHandlerException, GFacException {
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
new file mode 100644
index 0000000..3086b95
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/persistence/DBJobPersistenceManager.java
@@ -0,0 +1,223 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.persistence;
+
+import org.apache.airavata.common.utils.DBUtil;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.log4j.Logger;
+import org.globus.gram.internal.GRAMConstants;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * User: AmilaJ (amilaj@apache.org)
+ * Date: 6/18/13
+ * Time: 4:16 PM
+ * Database based job persistence manager. Current default implementation.
+ */
+
+public class DBJobPersistenceManager implements JobPersistenceManager {
+
+ private DBUtil dbUtil;
+
+ private static final Logger log = Logger.getLogger(DBJobPersistenceManager.class);
+
+
+ public DBJobPersistenceManager(DBUtil db) {
+ this.dbUtil = db;
+ }
+
+ public synchronized void updateJobStatus(JobData jobData) throws GFacException {
+
+ if (jobData.getState() == GRAMConstants.STATUS_UNSUBMITTED) {
+ insertJob(jobData);
+ } else {
+
+ String sql = "update gram_job set status = ? where job_id = ?";
+
+ Connection connection = null;
+ PreparedStatement stmt = null;
+
+ try {
+ connection = getConnection();
+ stmt = connection.prepareStatement(sql);
+ stmt.setInt(1, jobData.getState());
+ stmt.setString(2, jobData.getJobId());
+
+ stmt.executeUpdate();
+ connection.commit();
+
+ } catch (SQLException e) {
+ throw new GFacException(e);
+ } finally {
+ try {
+ if (stmt != null) {
+ stmt.close();
+ }
+
+ if (connection != null) {
+ connection.close();
+ }
+
+ } catch (SQLException e) {
+ log.error("Error closing streams", e);
+ }
+ }
+ }
+ }
+
+ private void insertJob(JobData jobData) throws GFacException {
+
+ String sql = "insert into gram_job values (?, ?)";
+
+ PreparedStatement stmt = null;
+ Connection connection = null;
+
+ try {
+ connection = getConnection();
+ stmt = connection.prepareStatement(sql);
+ stmt.setString(1, jobData.getJobId());
+ stmt.setInt(2, jobData.getState());
+
+ stmt.executeUpdate();
+ } catch (SQLException e) {
+ throw new GFacException(e);
+ } finally {
+ try {
+ if (stmt != null) {
+ stmt.close();
+ }
+
+ if (connection != null) {
+ connection.close();
+ }
+
+ } catch (SQLException e) {
+ log.error("Error closing streams", e);
+ }
+ }
+
+ }
+
+ public List<JobData> getRunningJobs() throws GFacException {
+
+ String sql = "select * from gram_job where status not in (?, ?, ?)";
+
+ int[] statuses = new int[3];
+ statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
+ statuses[1] = GRAMConstants.STATUS_DONE;
+ statuses[2] = GRAMConstants.STATUS_FAILED;
+
+ return getJobs(sql, statuses);
+ }
+
+ public List<JobData> getFailedJobs() throws GFacException {
+
+ String sql = "select * from gram_job where status in (?)";
+
+ int[] statuses = new int[1];
+ statuses[0] = GRAMConstants.STATUS_FAILED;
+
+ return getJobs(sql, statuses);
+ }
+
+ public List<JobData> getUnSubmittedJobs() throws GFacException {
+
+ String sql = "select * from gram_job where status in (?)";
+
+ int[] statuses = new int[1];
+ statuses[0] = GRAMConstants.STATUS_UNSUBMITTED;
+
+ return getJobs(sql, statuses);
+ }
+
+ public List<JobData> getSuccessfullyCompletedJobs() throws GFacException {
+
+ String sql = "select * from gram_job where status in (?)";
+
+ int[] statuses = new int[1];
+ statuses[0] = GRAMConstants.STATUS_DONE;
+
+ return getJobs(sql, statuses);
+
+ }
+
+
+ protected List<JobData> getJobs(String sql, int[] statuses) throws GFacException {
+
+ List<JobData> jobs = new ArrayList<JobData>();
+
+ PreparedStatement preparedStatement = null;
+ Connection connection = null;
+
+ try {
+ connection = getConnection();
+ preparedStatement = connection.prepareStatement(sql);
+
+ int index = 1;
+ for (int status : statuses) {
+ preparedStatement.setInt(index, status);
+ ++index;
+ }
+
+ ResultSet resultSet = preparedStatement.executeQuery();
+
+ while (resultSet.next()) {
+
+ String jobId = resultSet.getString("job_id");
+ int state = resultSet.getInt("status");
+
+ jobs.add(new JobData(jobId, state));
+ }
+
+ } catch (SQLException e) {
+ throw new GFacException(e);
+ } finally {
+ try {
+ if (preparedStatement != null) {
+ preparedStatement.close();
+ }
+
+ if (connection != null) {
+ connection.close();
+ }
+
+ } catch (SQLException e) {
+ log.error("Error closing connection", e);
+ }
+ }
+
+ return jobs;
+ }
+
+ private synchronized Connection getConnection() throws SQLException {
+ Connection connection = dbUtil.getConnection();
+ connection.setAutoCommit(true);
+
+ return connection;
+ }
+}
[02/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
new file mode 100644
index 0000000..0cf413c
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/provider/impl/GramProvider.java
@@ -0,0 +1,526 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.provider.impl;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.util.Map;
+import java.util.MissingResourceException;
+import java.util.Properties;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.JobSubmissionFault;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.notification.events.JobIDEvent;
+import org.apache.airavata.gfac.notification.events.StartExecutionEvent;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.util.GramProviderUtils;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.apache.airavata.gfac.util.GramJobSubmissionListener;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.JobState;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.GlobusHostType;
+import org.globus.gram.GramException;
+import org.globus.gram.GramJob;
+import org.globus.gram.WaitingForCommitException;
+import org.globus.gram.internal.GRAMConstants;
+import org.globus.gram.internal.GRAMProtocolErrorConstants;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GramProvider extends AbstractProvider{
+ private static final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
+
+ private GramJob job;
+ private GramJobSubmissionListener listener;
+ private boolean twoPhase = true;
+
+ /**
+ * If normal job submission fail due to an authorisation failure or script failure we
+ * will re-attempt to submit the job. In-order to avoid any recursive loop during a continuous
+ * failure we track whether failure paths are tried or not. Following booleans keeps track whether
+ * we already tried a failure path or not.
+ */
+ /**
+ * To track job submissions during a authorisation failure while requesting job.
+ */
+ private boolean renewCredentialsAttempt = false;
+ /**
+ * To track job submission during a script error situation.
+ */
+ private boolean reSubmissionInProgress = false;
+ /**
+ * To track authorisation failures during status monitoring.
+ */
+ private boolean authorisationFailedAttempt = false;
+
+ private static final Map<String, GramJob> currentlyExecutingJobCache
+ = new ConcurrentHashMap<String, GramJob>();
+
+ private static Properties resources;
+
    // Loads the GRAM error-code -> message table from errors.properties on the
    // classpath, once per JVM. On failure 'resources' stays empty and lookups
    // will simply miss; the error is logged, not rethrown (a static-initializer
    // exception would make the whole class unusable).
    static {
        try {

            String propFileName = "errors.properties";
            resources = new Properties();
            InputStream inputStream = GramProvider.class.getClassLoader()
                    .getResourceAsStream(propFileName);

            if (inputStream == null) {
                throw new FileNotFoundException("property file '" + propFileName
                        + "' not found in the classpath");
            }

            resources.load(inputStream);
            // NOTE(review): inputStream is never closed — harmless for a one-time
            // classpath read, but worth confirming/fixing.

        } catch (FileNotFoundException mre) {
            log.error("errors.properties not found", mre);
        } catch (IOException e) {
            log.error("Error reading errors.properties file", e);
        }
    }
+
+
+ // This method prepare the environment before the application invocation.
    /**
     * Prepares the provider before execution: reads the optional "TwoPhase"
     * server setting (defaults to two-phase commit when absent or unreadable),
     * builds the GramJob via {@link GramProviderUtils#setupEnvironment}, and
     * registers the status listener on it.
     *
     * @param jobExecutionContext current job context
     * @throws GFacProviderException propagated from superclass initialization
     * @throws GFacException         propagated from superclass / environment setup
     */
    public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {

        try {
            super.initialize(jobExecutionContext);
            String strTwoPhase = ServerSettings.getSetting("TwoPhase");
            if (strTwoPhase != null) {
                twoPhase = Boolean.parseBoolean(strTwoPhase);
                log.info("Two phase commit is set to " + twoPhase);
            }
        } catch (ApplicationSettingsException e) {
            // Missing/unreadable setting is non-fatal: keep the default twoPhase value.
            log.warn("Error reading TwoPhase property from configurations.", e);
        }

        job = GramProviderUtils.setupEnvironment(jobExecutionContext, twoPhase);
        listener = new GramJobSubmissionListener(job, jobExecutionContext);
        job.addListener(listener);
    }
+
    /**
     * Submits the prepared GRAM job: attaches the GSI credential, picks the
     * gatekeeper endpoint, delegates to submitJobs(...), and always detaches the
     * status listener afterwards.
     *
     * @param jobExecutionContext current job context; must carry a GSISecurityContext
     * @throws GFacProviderException propagated from submission
     * @throws GFacException         on settings or submission failures
     */
    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException{
        jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
        GlobusHostType host = (GlobusHostType) jobExecutionContext.getApplicationContext().
                getHostDescription().getType();
        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
                getApplicationDeploymentDescription().getType();

        StringBuilder stringBuilder = new StringBuilder();
        try {

            GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
                    getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
            job.setCredentials(gssCred);
            // We do not support multiple gatekeepers in XBaya GUI, so we simply pick the 0th element in the array
            String gateKeeper = host.getGlobusGateKeeperEndPointArray(0);
            log.info("Request to contact:" + gateKeeper);

            stringBuilder.append("Finished launching job, Host = ").append(host.getHostAddress()).append(" RSL = ")
                    .append(job.getRSL()).append(" working directory = ").append(app.getStaticWorkingDirectory())
                    .append(" temp directory = ").append(app.getScratchWorkingDirectory())
                    .append(" Globus GateKeeper Endpoint = ").append(gateKeeper);

            log.info(stringBuilder.toString());

            submitJobs(gateKeeper, jobExecutionContext, host);

        } catch (ApplicationSettingsException e) {
            throw new GFacException(e.getMessage(), e);
        } finally {
            if (job != null) {
                try {
                    /*
                     * Remove listener
                     */
                    job.removeListener(listener);
                } catch (Exception e) {
                    // Listener cleanup is best-effort; never mask the primary outcome.
                    log.error(e.getMessage());
                }
            }
        }
    }
+
+ private void submitJobs(String gateKeeper,
+ JobExecutionContext jobExecutionContext,
+ GlobusHostType globusHostType) throws GFacException, GFacProviderException {
+ boolean applicationSaved=false;
+ String taskID = jobExecutionContext.getTaskData().getTaskID();
+
+ if (twoPhase) {
+ try {
+ /*
+ * The first boolean is to force communication through SSLv3
+ * The second boolean is to specify the job is a batch job - use true for interactive and false for
+ * batch.
+ * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
+ */
+ job.request(true, gateKeeper, false, false);
+
+ // Single boolean to track all authentication failures, therefore we need to re-initialize
+ // this here
+ renewCredentialsAttempt = false;
+
+ } catch (WaitingForCommitException e) {
+ String jobID = job.getIDAsString();
+
+ details.setJobID(jobID);
+ details.setJobDescription(job.getRSL());
+ jobExecutionContext.setJobDetails(details);
+ GFacUtils.saveJobStatus(details, JobState.UN_SUBMITTED, taskID);
+
+ applicationSaved=true;
+ String jobStatusMessage = "Un-submitted JobID= " + jobID;
+ log.info(jobStatusMessage);
+ jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
+
+ log.info("Two phase commit: sending COMMIT_REQUEST signal; Job id - " + jobID);
+
+ try {
+ job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
+
+ } catch (GramException gramException) {
+ throw new GFacException("Error while sending commit request. Job Id - "
+ + job.getIDAsString(), gramException);
+ } catch (GSSException gssException) {
+
+ // User credentials are invalid
+ log.error("Error while submitting commit request - Credentials provided are invalid. Job Id - "
+ + job.getIDAsString(), e);
+ log.info("Attempting to renew credentials and re-submit commit signal...");
+ GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+ renewCredentials(jobExecutionContext);
+
+ try {
+ job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
+ } catch (GramException e1) {
+ GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+ throw new GFacException("Error while sending commit request. Job Id - "
+ + job.getIDAsString(), e1);
+ } catch (GSSException e1) {
+ GFacUtils.saveErrorDetails(gssException.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+ throw new GFacException("Error while sending commit request. Job Id - "
+ + job.getIDAsString() + ". Credentials provided invalid", e1);
+ }
+ }
+ GFacUtils.updateJobStatus(details, JobState.SUBMITTED);
+ jobStatusMessage = "Submitted JobID= " + job.getIDAsString();
+ log.info(jobStatusMessage);
+ jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
+
+ } catch (GSSException e) {
+ // Renew credentials and re-submit
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+
+ reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
+
+ } catch (GramException e) {
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+
+ throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
+ }
+ } else {
+
+ /*
+ * The first boolean is to force communication through SSLv3
+ * The second boolean is to specify the job is a batch job - use true for interactive and false for
+ * batch.
+ * The third boolean is to specify to use the full proxy and not delegate a limited proxy.
+ */
+ try {
+
+ job.request(true, gateKeeper, false, false);
+ renewCredentialsAttempt = false;
+
+ } catch (GramException e) {
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+ throw new GFacException("An error occurred while submitting a job, job id = " + job.getIDAsString(), e);
+ } catch (GSSException e) {
+ GFacUtils.saveErrorDetails(e.getLocalizedMessage(), CorrectiveAction.RETRY_SUBMISSION, ErrorCategory.AIRAVATA_INTERNAL_ERROR, taskID);
+ // Renew credentials and re-submit
+ reSubmitJob(gateKeeper, jobExecutionContext, globusHostType, e);
+ }
+
+ String jobStatusMessage = "Un-submitted JobID= " + job.getIDAsString();
+ log.info(jobStatusMessage);
+ jobExecutionContext.getNotifier().publish(new JobIDEvent(jobStatusMessage));
+
+ }
+
+ currentlyExecutingJobCache.put(job.getIDAsString(), job);
+ /*
+ * Wait until job is done
+ */
+ listener.waitFor();
+
+ checkJobStatus(jobExecutionContext, globusHostType, gateKeeper);
+
+ }
+
+ private void renewCredentials(JobExecutionContext jobExecutionContext) throws GFacException {
+
+ renewCredentials(this.job, jobExecutionContext);
+ }
+
+ private void renewCredentials(GramJob gramJob, JobExecutionContext jobExecutionContext) throws GFacException {
+
+ try {
+ GSSCredential gssCred = ((GSISecurityContext)jobExecutionContext.
+ getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
+ gramJob.renew(gssCred);
+ } catch (GramException e1) {
+ throw new GFacException("Unable to renew credentials. Job Id - "
+ + gramJob.getIDAsString(), e1);
+ } catch (GSSException e1) {
+ throw new GFacException("Unable to renew credentials. Job Id - "
+ + gramJob.getIDAsString(), e1);
+ } catch (ApplicationSettingsException e) {
+ throw new GFacException(e.getLocalizedMessage(), e);
+ }
+ }
+
+ private void reSubmitJob(String gateKeeper,
+ JobExecutionContext jobExecutionContext,
+ GlobusHostType globusHostType, Exception e) throws GFacException, GFacProviderException {
+
+ if (!renewCredentialsAttempt) {
+
+ renewCredentialsAttempt = true;
+
+ // User credentials are invalid
+ log.error("Error while submitting job - Credentials provided are invalid. Job Id - "
+ + job.getIDAsString(), e);
+ log.info("Attempting to renew credentials and re-submit jobs...");
+
+ // Remove existing listener and register a new listener
+ job.removeListener(listener);
+ listener = new GramJobSubmissionListener(job, jobExecutionContext);
+
+ job.addListener(listener);
+
+ renewCredentials(jobExecutionContext);
+
+ submitJobs(gateKeeper, jobExecutionContext, globusHostType);
+
+ } else {
+ throw new GFacException("Error while submitting job - Credentials provided are invalid. Job Id - "
+ + job.getIDAsString(), e);
+ }
+
+ }
+
+ private void reSubmitJob(String gateKeeper,
+ JobExecutionContext jobExecutionContext,
+ GlobusHostType globusHostType) throws GFacException, GFacProviderException {
+
+ // User credentials are invalid
+ log.info("Attempting to renew credentials and re-submit jobs...");
+
+ // Remove existing listener and register a new listener
+ job.removeListener(listener);
+ listener = new GramJobSubmissionListener(job, jobExecutionContext);
+
+ job.addListener(listener);
+
+ renewCredentials(jobExecutionContext);
+
+ submitJobs(gateKeeper, jobExecutionContext, globusHostType);
+
+ }
+
+
+
    /**
     * No-op: this provider performs no per-execution cleanup; job-cache entries
     * are removed in checkJobStatus instead.
     */
    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
    }
+
    /**
     * Cancels the GRAM job identified by the given id. Delegates to
     * {@link #cancelSingleJob(String, JobExecutionContext)}.
     */
    public void cancelJob(String jobId, JobExecutionContext jobExecutionContext) throws GFacException {
        cancelSingleJob(jobId, jobExecutionContext);
    }
+
+
+ private void cancelSingleJob(String jobId, JobExecutionContext context) throws GFacException {
+ // First check whether job id is in the cache
+ if (currentlyExecutingJobCache.containsKey(jobId)) {
+
+ synchronized (this) {
+ GramJob gramJob = currentlyExecutingJobCache.get(jobId);
+
+ // Even though we check using containsKey, at this point job could be null
+ if (gramJob != null && (gramJob.getStatus() != GRAMConstants.STATUS_DONE ||
+ gramJob.getStatus() != GRAMConstants.STATUS_FAILED)) {
+ cancelJob(gramJob, context);
+ }
+ }
+
+ } else {
+
+ try {
+ GSSCredential gssCred = ((GSISecurityContext)context.
+ getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getGssCredentials();
+
+ GramJob gramJob = new GramJob(null);
+ try {
+ gramJob.setID(jobId);
+ } catch (MalformedURLException e) {
+ throw new GFacException("Invalid job id - " + jobId, e);
+ }
+ gramJob.setCredentials(gssCred);
+
+ synchronized (this) {
+ if (gramJob.getStatus() != GRAMConstants.STATUS_DONE ||
+ gramJob.getStatus() != GRAMConstants.STATUS_FAILED) {
+ cancelJob(gramJob, context);
+ }
+ }
+ } catch (ApplicationSettingsException e) {
+ throw new GFacException(e);
+ }
+ }
+ }
+
+ private void cancelJob(GramJob gramJob, JobExecutionContext context) throws GFacException{
+
+ try {
+ gramJob.cancel();
+ } catch (GramException e) {
+ throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e);
+ } catch (GSSException e) {
+
+ log.warn("Credentials invalid to cancel job. Attempting to renew credentials and re-try. " +
+ "Job id - " + gramJob.getIDAsString());
+ renewCredentials(gramJob, context);
+
+ try {
+ gramJob.cancel();
+ gramJob.signal(GramJob.SIGNAL_COMMIT_END);
+ } catch (GramException e1) {
+ throw new GFacException("Error cancelling job, id - " + gramJob.getIDAsString(), e1);
+ } catch (GSSException e1) {
+ throw new GFacException("Error cancelling job, invalid credentials. Job id - "
+ + gramJob.getIDAsString(), e);
+ }
+ }
+
+ }
+
    /**
     * No-op: this provider takes no configuration properties.
     */
    public void initProperties(Map<String, String> properties) throws GFacException {

    }
+
    /**
     * Inspects the listener's final status after waitFor() returns and reacts:
     * on FAILED it may re-submit once (invalid script reply, or authorization
     * failure after renewing credentials), treats a user cancel as success, and
     * otherwise removes the job from the cache and raises a JobSubmissionFault;
     * on DONE it just logs and evicts the cache entry.
     *
     * NOTE(review): the reSubmissionInProgress / authorisationFailedAttempt
     * flags limit each recovery path to a single attempt per provider instance;
     * the exact ordering of flag set, re-submit and early return is relied upon
     * by the recursive submitJobs -> checkJobStatus flow, so the code is kept
     * byte-identical here.
     */
    private void checkJobStatus(JobExecutionContext jobExecutionContext, GlobusHostType host, String gateKeeper)
            throws GFacProviderException {
        int jobStatus = listener.getCurrentStatus();

        if (jobStatus == GramJob.STATUS_FAILED) {

            String errorMsg = "Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " Job Exit Code = "
                    + listener.getError() + " Error Description = " + getGramErrorString(listener.getError());

            if (listener.getError() == GRAMProtocolErrorConstants.INVALID_SCRIPT_REPLY) {

                // re-submitting without renewing
                // TODO verify why we re-submit jobs when we get a invalid script reply
                if (!reSubmissionInProgress) {
                    reSubmissionInProgress = true;

                    log.info("Invalid script reply received. Re-submitting job, id - " + job.getIDAsString());
                    try {
                        reSubmitJob(gateKeeper, jobExecutionContext, host);
                    } catch (GFacException e) {
                        throw new GFacProviderException
                                ("Error during re-submission. Original job submission data - " + errorMsg, e);
                    }
                    return;
                }

            } else if (listener.getError() == GRAMProtocolErrorConstants.ERROR_AUTHORIZATION) {

                // re-submit with renewed credentials
                if (!authorisationFailedAttempt) {
                    authorisationFailedAttempt = true;
                    log.info("Authorisation error contacting provider. Re-submitting job with renewed credentials.");

                    try {
                        renewCredentials(jobExecutionContext);
                        reSubmitJob(gateKeeper, jobExecutionContext, host);
                    } catch (GFacException e) {
                        throw new GFacProviderException
                                ("Error during re-submission. Original job submission data - " + errorMsg, e);
                    }

                    return;
                }

            } else if (listener.getError() == GRAMProtocolErrorConstants.USER_CANCELLED) {

                // A user-requested cancel is not treated as a failure.
                log.info("User successfully cancelled job id " + job.getIDAsString());
                return;
            }

            // Unrecoverable failure (or recovery already attempted): evict the
            // job from the cache and surface a submission fault.
            log.error(errorMsg);

            synchronized (this) {
                currentlyExecutingJobCache.remove(job.getIDAsString());
            }

            throw new JobSubmissionFault(new Exception(errorMsg), host.getHostAddress(), gateKeeper,
                    job.getRSL(), jobExecutionContext, getGramErrorString(listener.getError()),
                    listener.getError());

        } else if (jobStatus == GramJob.STATUS_DONE) {
            log.info("Job " + job.getIDAsString() + " on host " + host.getHostAddress() + " is successfully executed.");

            synchronized (this) {
                currentlyExecutingJobCache.remove(job.getIDAsString());
            }
        }
    }
+
+ public String getGramErrorString(int errorCode) {
+
+ if (resources != null) {
+ try {
+ return resources.getProperty(String.valueOf(errorCode));
+ } catch (MissingResourceException mre) {
+ log.warn("Error reading globus error descriptions.", mre);
+ return "Error code: " + errorCode;
+ }
+ } else {
+ return "Error code: " + errorCode;
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramJobSubmissionListener.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramJobSubmissionListener.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramJobSubmissionListener.java
new file mode 100644
index 0000000..5500853
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramJobSubmissionListener.java
@@ -0,0 +1,141 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.util;
+
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.notification.events.StatusChangeEvent;
+import org.apache.airavata.gfac.utils.GFacUtils;
+import org.globus.gram.GramJob;
+import org.globus.gram.GramJobListener;
+import org.ietf.jgss.GSSCredential;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GramJobSubmissionListener implements GramJobListener{
+ private final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
+
+ public static final int NO_ERROR = -42;
+ public static final int INITIAL_STATUS = -43;
+
+ private volatile boolean jobDone = false;
+ private volatile int error = NO_ERROR;
+ private int currentStatus = INITIAL_STATUS;
+
+ private JobExecutionContext context;
+ private GramJob job;
+
+ public GramJobSubmissionListener(GramJob job, JobExecutionContext context) {
+ this.job = job;
+ this.context = context;
+ }
+
+ /**
+ * This method is used to block the process until the currentStatus of the job is DONE or FAILED
+ */
+ public void waitFor() {
+ while (!isJobDone()) {
+ synchronized (this) {
+ try {
+ wait();
+ } catch (InterruptedException e) {}
+ }
+ }
+ }
+
+
+
+ private synchronized boolean isJobDone() {
+ return this.jobDone;
+ }
+
+ private void setStatus(int status, int error) {
+ try {
+ GFacUtils.updateJobStatus(context.getJobDetails(), GramProviderUtils.getApplicationJobStatus(status));
+ } catch (GFacException e) {
+ log.error("Error persisting status" + e.getLocalizedMessage(), e);
+ }
+ this.currentStatus = status;
+ this.error = error;
+
+ switch (this.currentStatus) {
+ case GramJob.STATUS_FAILED:
+ log.info("Job Error Code: " + error);
+ this.jobDone = true;
+ notifyAll();
+ case GramJob.STATUS_DONE:
+ this.jobDone = true;
+ notifyAll();
+ }
+
+ }
+
+ public synchronized void statusChanged(GramJob job) {
+
+ int jobStatus = job.getStatus();
+ String jobStatusMessage = "Status of job " + job.getIDAsString() + "is " + job.getStatusAsString();
+ /*
+ * Notify currentStatus change
+ */
+ this.context.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
+
+ /*
+ * Set new currentStatus if it is jobDone, notify all wait object
+ */
+ if (currentStatus != jobStatus) {
+ currentStatus = jobStatus;
+
+ setStatus(job.getStatus(), job.getError());
+
+ // Test to see whether we need to renew credentials
+ renewCredentials(job);
+ }
+ }
+
+ private void renewCredentials(GramJob job) {
+
+ try {
+
+ int proxyExpTime = job.getCredentials().getRemainingLifetime();
+ if (proxyExpTime < GSISecurityContext.CREDENTIAL_RENEWING_THRESH_HOLD) {
+ log.info("Job proxy expired. Trying to renew proxy");
+ GSSCredential gssCred = ((GSISecurityContext)context.
+ getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
+ job.renew(gssCred);
+ log.info("MyProxy credentials are renewed .");
+ }
+
+ } catch (Exception e) {
+ log.error("An error occurred while trying to renew credentials. Job id " + job.getIDAsString());
+ }
+
+
+ }
+
+ public synchronized int getError() {
+ return error;
+ }
+
+ public synchronized int getCurrentStatus() {
+ return currentStatus;
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramProviderUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramProviderUtils.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramProviderUtils.java
new file mode 100644
index 0000000..d76b067
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramProviderUtils.java
@@ -0,0 +1,114 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.util;
+
+import org.apache.airavata.gfac.ToolsException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.gfac.utils.*;
+import org.apache.airavata.model.workspace.experiment.JobState;
+import org.globus.gram.GramAttributes;
+import org.globus.gram.GramJob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+public class GramProviderUtils {
+ private static final Logger log = LoggerFactory.getLogger(GramProviderUtils.class);
+
+ public static GramJob setupEnvironment(JobExecutionContext jobExecutionContext, boolean enableTwoPhase) throws GFacProviderException {
+ log.debug("Searching for Gate Keeper");
+ try {
+ GramAttributes jobAttr = GramRSLGenerator.configureRemoteJob(jobExecutionContext);
+ String rsl = jobAttr.toRSL();
+
+ if (enableTwoPhase) {
+ rsl = rsl + "(twoPhase=yes)";
+ }
+
+ log.debug("RSL = " + rsl);
+ GramJob job = new GramJob(rsl);
+ return job;
+ } catch (ToolsException te) {
+ throw new GFacProviderException(te.getMessage(), te);
+ }
+ }
+
+ public static JobState getApplicationJobStatus(int gramStatus) {
+ switch (gramStatus) {
+ case GramJob.STATUS_UNSUBMITTED:
+ return JobState.HELD;
+ case GramJob.STATUS_ACTIVE:
+ return JobState.ACTIVE;
+ case GramJob.STATUS_DONE:
+ return JobState.COMPLETE;
+ case GramJob.STATUS_FAILED:
+ return JobState.FAILED;
+ case GramJob.STATUS_PENDING:
+ return JobState.QUEUED;
+ case GramJob.STATUS_STAGE_IN:
+ return JobState.QUEUED;
+ case GramJob.STATUS_STAGE_OUT:
+ return JobState.COMPLETE;
+ case GramJob.STATUS_SUSPENDED:
+ return JobState.SUSPENDED;
+ default:
+ return JobState.UNKNOWN;
+ }
+ }
+
+ public static URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
+ StringBuffer buf = new StringBuffer();
+ if (!host.startsWith("gsiftp://"))
+ buf.append("gsiftp://");
+ buf.append(host);
+ if (!host.endsWith("/"))
+ buf.append("/");
+ buf.append(localPath);
+ return new URI(buf.toString());
+ }
+
+ public static URI createGsiftpURI(GridFTPContactInfo host, String localPath) throws URISyntaxException {
+ StringBuffer buf = new StringBuffer();
+
+ if (!host.hostName.startsWith("gsiftp://"))
+ buf.append("gsiftp://");
+ buf.append(host).append(":").append(host.port);
+ if (!host.hostName.endsWith("/"))
+ buf.append("/");
+ buf.append(localPath);
+ return new URI(buf.toString());
+ }
+
+ public static String createGsiftpURIAsString(String host, String localPath) throws URISyntaxException {
+ StringBuffer buf = new StringBuffer();
+ if (!host.startsWith("gsiftp://"))
+ buf.append("gsiftp://");
+ buf.append(host);
+ if (!host.endsWith("/"))
+ buf.append("/");
+ buf.append(localPath);
+ return buf.toString();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramRSLGenerator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramRSLGenerator.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramRSLGenerator.java
new file mode 100644
index 0000000..81782ff
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GramRSLGenerator.java
@@ -0,0 +1,211 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.util;
+
+import org.apache.airavata.common.utils.StringUtil;
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.MappingFactory;
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.ToolsException;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.schemas.gfac.*;
+import org.globus.gram.GramAttributes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+public class GramRSLGenerator {
+ protected static final Logger log = LoggerFactory.getLogger(GramRSLGenerator.class);
+
+ private enum JobType {
+ SERIAL, SINGLE, MPI, MULTIPLE, CONDOR
+ }
+
+ ;
+
+ public static GramAttributes configureRemoteJob(JobExecutionContext context) throws ToolsException {
+ HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) context.getApplicationContext().getApplicationDeploymentDescription().getType();
+ GramAttributes jobAttr = new GramAttributes();
+ jobAttr.setExecutable(app.getExecutableLocation());
+ jobAttr.setDirectory(app.getStaticWorkingDirectory());
+ jobAttr.setStdout(app.getStandardOutput());
+ jobAttr.setStderr(app.getStandardError());
+ /*
+ * The env here contains the env of the host and the application. i.e the env specified in the host description
+ * and application description documents
+ */
+ NameValuePairType[] env = app.getApplicationEnvironmentArray();
+ if (env.length != 0) {
+ Map<String, String> nv = new HashMap<String, String>();
+ for (int i = 0; i < env.length; i++) {
+ String key = env[i].getName();
+ String value = env[i].getValue();
+ nv.put(key, value);
+ }
+
+ for (Map.Entry<String, String> entry : nv.entrySet()) {
+ jobAttr.addEnvVariable(entry.getKey(), entry.getValue());
+ }
+ }
+ jobAttr.addEnvVariable(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
+ jobAttr.addEnvVariable(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
+
+
+
+ if (app.getStandardInput() != null && !"".equals(app.getStandardInput())) {
+ jobAttr.setStdin(app.getStandardInput());
+ } else {
+ MessageContext input = context.getInMessageContext();;
+ Map<String,Object> inputs = input.getParameters();
+ Set<String> keys = inputs.keySet();
+ for (String paramName : keys ) {
+ ActualParameter actualParameter = (ActualParameter) inputs.get(paramName);
+ if ("URIArray".equals(actualParameter.getType().getType().toString()) || "StringArray".equals(actualParameter.getType().getType().toString())
+ || "FileArray".equals(actualParameter.getType().getType().toString())) {
+ String[] values = null;
+ if (actualParameter.getType() instanceof URIArrayType) {
+ values = ((URIArrayType) actualParameter.getType()).getValueArray();
+ } else if (actualParameter.getType() instanceof StringArrayType) {
+ values = ((StringArrayType) actualParameter.getType()).getValueArray();
+ } else if (actualParameter.getType() instanceof FileArrayType) {
+ values = ((FileArrayType) actualParameter.getType()).getValueArray();
+ }
+ String value = StringUtil.createDelimiteredString(values, " ");
+ jobAttr.addArgument(value);
+ } else {
+ String paramValue = MappingFactory.toString(actualParameter);
+ jobAttr.addArgument(paramValue);
+ }
+ }
+ }
+ // Using the workflowContext Header values if user provided them in the request and overwrite the default values in DD
+ //todo finish the scheduling based on workflow execution context
+ TaskDetails taskData = context.getTaskData();
+ if(taskData != null && taskData.isSetTaskScheduling()){
+ ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
+ try {
+ int cpuCount = computionnalResource.getTotalCPUCount();
+ if(cpuCount>0){
+ app.setCpuCount(cpuCount);
+ }
+ } catch (NullPointerException e) {
+ log.debug("No Value sent in WorkflowContextHeader for CPU Count, value in the Deployment Descriptor will be used");
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ int nodeCount = computionnalResource.getNodeCount();
+ if(nodeCount>0){
+ app.setNodeCount(nodeCount);
+ }
+ } catch (NullPointerException e) {
+ log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ String queueName = computionnalResource.getQueueName();
+ if (queueName != null) {
+ if(app.getQueue() == null){
+ QueueType queueType = app.addNewQueue();
+ queueType.setQueueName(queueName);
+ }else{
+ app.getQueue().setQueueName(queueName);
+ }
+ }
+ } catch (NullPointerException e) {
+ log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ int maxwallTime = computionnalResource.getWallTimeLimit();
+ if(maxwallTime>0){
+ app.setMaxWallTime(maxwallTime);
+ }
+ } catch (NullPointerException e) {
+ log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ }
+ if (app.getNodeCount() > 0) {
+ jobAttr.set("hostCount", String.valueOf(app.getNodeCount()));
+ log.debug("Setting number of Nodes to " + app.getCpuCount());
+ }
+ if (app.getCpuCount() > 0) {
+ log.debug("Setting number of procs to " + app.getCpuCount());
+ jobAttr.setNumProcs(app.getCpuCount());
+ }
+ if (app.getMinMemory() > 0) {
+ log.debug("Setting minimum memory to " + app.getMinMemory());
+ jobAttr.setMinMemory(app.getMinMemory());
+ }
+ if (app.getMaxMemory() > 0) {
+ log.debug("Setting maximum memory to " + app.getMaxMemory());
+ jobAttr.setMaxMemory(app.getMaxMemory());
+ }
+ if (app.getProjectAccount() != null) {
+ if (app.getProjectAccount().getProjectAccountNumber() != null) {
+ log.debug("Setting project to " + app.getProjectAccount().getProjectAccountNumber());
+ jobAttr.setProject(app.getProjectAccount().getProjectAccountNumber());
+ }
+ }
+ if (app.getQueue() != null) {
+ if (app.getQueue().getQueueName() != null) {
+ log.debug("Setting job queue to " + app.getQueue().getQueueName());
+ jobAttr.setQueue(app.getQueue().getQueueName());
+ }
+ }
+ if (app.getMaxWallTime() > 0) {
+ log.debug("Setting max wall clock time to " + app.getMaxWallTime());
+
+ jobAttr.setMaxWallTime(app.getMaxWallTime());
+ jobAttr.set("proxy_timeout", "1");
+ } else {
+ jobAttr.setMaxWallTime(30);
+ }
+ String jobType = JobType.SINGLE.toString();
+ if (app.getJobType() != null) {
+ jobType = app.getJobType().toString();
+ }
+ if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) {
+ log.debug("Setting job type to single");
+ jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
+ } if (jobType.equalsIgnoreCase(JobType.SERIAL.toString())) {
+ log.debug("Setting job type to single");
+ jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
+ } else if (jobType.equalsIgnoreCase(JobType.MPI.toString())) {
+ log.debug("Setting job type to mpi");
+ jobAttr.setJobType(GramAttributes.JOBTYPE_MPI);
+ } else if (jobType.equalsIgnoreCase(JobType.MULTIPLE.toString())) {
+ log.debug("Setting job type to multiple");
+ jobAttr.setJobType(GramAttributes.JOBTYPE_MULTIPLE);
+ } else if (jobType.equalsIgnoreCase(JobType.CONDOR.toString())) {
+ jobAttr.setJobType(GramAttributes.JOBTYPE_CONDOR);
+ }
+
+ return jobAttr;
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GridFTPContactInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GridFTPContactInfo.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GridFTPContactInfo.java
new file mode 100644
index 0000000..d37beba
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/util/GridFTPContactInfo.java
@@ -0,0 +1,61 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.util;
+
+import org.apache.airavata.gfac.Constants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GridFTPContactInfo {
+ protected final static Logger log = LoggerFactory.getLogger(GridFTPContactInfo.class);
+ public String hostName;
+ public int port;
+
+ public GridFTPContactInfo(String hostName, int port) {
+ if (port <= 0 || port == 80) {
+ log.debug(hostName + "port recived " + port + " setting it to " + Constants.DEFAULT_GSI_FTP_PORT);
+ port = Constants.DEFAULT_GSI_FTP_PORT;
+ }
+ this.hostName = hostName;
+ this.port = port;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj instanceof GridFTPContactInfo) {
+ return hostName.equals(((GridFTPContactInfo) obj).hostName) && port == ((GridFTPContactInfo) obj).port;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return hostName.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ StringBuffer buf = new StringBuffer();
+ buf.append(hostName).append(":").append(port);
+ return buf.toString();
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/resources/errors.properties b/modules/gfac/gfac-gram/src/main/resources/errors.properties
new file mode 100644
index 0000000..88c41b8
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/resources/errors.properties
@@ -0,0 +1,197 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Directly copied from jglobus. Not a good way to manage error properties.
+1 = Parameter not supported
+2 = The RSL length is greater than the maximum allowed
+3 = No resources available
+4 = Bad directory specified
+5 = The executable does not exist
+6 = Insufficient funds
+7 = Authentication with the remote server failed
+8 = Job cancelled by user
+9 = Job cancelled by system
+
+10 = Data transfer to the server failed
+11 = The stdin file does not exist
+12 = The connection to the server failed (check host and port)
+13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
+14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
+15 = The job manager received an invalid RSL
+16 = Could not connect to job manager
+17 = The job failed when the job manager attempted to run it
+18 = Paradyn error
+19 = The provided RSL 'jobtype' value is invalid
+
+20 = The provided RSL 'myjob' value is invalid
+21 = The job manager failed to locate an internal script argument file
+22 = The job manager failed to create an internal script argument file
+23 = The job manager detected an invalid job state
+24 = The job manager detected an invalid script response
+25 = The job manager detected an invalid job state
+26 = The provided RSL 'jobtype' value is not supported by this job manager
+27 = Unimplemented
+28 = The job manager failed to create an internal script submission file
+29 = The job manager cannot find the user proxy
+
+30 = The job manager failed to open the user proxy
+31 = The job manager failed to cancel the job as requested
+32 = System memory allocation failed
+33 = The interprocess job communication initialization failed
+34 = The interprocess job communication setup failed
+35 = The provided RSL 'host count' value is invalid
+36 = One of the provided RSL parameters is unsupported
+37 = The provided RSL 'queue' parameter is invalid
+38 = The provided RSL 'project' parameter is invalid
+39 = The provided RSL string includes variables that could not be identified
+
+40 = The provided RSL 'environment' parameter is invalid
+41 = The provided RSL 'dryrun' parameter is invalid
+42 = The provided RSL is invalid (an empty string)
+43 = The job manager failed to stage the executable
+44 = The job manager failed to stage the stdin file
+45 = The requested job manager type is invalid
+46 = The provided RSL 'arguments' parameter is invalid
+47 = The gatekeeper failed to run the job manager
+48 = The provided RSL could not be properly parsed
+49 = There is a version mismatch between GRAM components
+
+50 = The provided RSL 'arguments' parameter is invalid
+51 = The provided RSL 'count' parameter is invalid
+52 = The provided RSL 'directory' parameter is invalid
+53 = The provided RSL 'dryrun' parameter is invalid
+54 = The provided RSL 'environment' parameter is invalid
+55 = The provided RSL 'executable' parameter is invalid
+56 = The provided RSL 'host_count' parameter is invalid
+57 = The provided RSL 'jobtype' parameter is invalid
+58 = The provided RSL 'maxtime' parameter is invalid
+59 = The provided RSL 'myjob' parameter is invalid
+
+60 = The provided RSL 'paradyn' parameter is invalid
+61 = The provided RSL 'project' parameter is invalid
+62 = The provided RSL 'queue' parameter is invalid
+63 = The provided RSL 'stderr' parameter is invalid
+64 = The provided RSL 'stdin' parameter is invalid
+65 = The provided RSL 'stdout' parameter is invalid
+66 = The job manager failed to locate an internal script
+67 = The job manager failed on the system call pipe()
+68 = The job manager failed on the system call fcntl()
+69 = The job manager failed to create the temporary stdout filename
+
+70 = The job manager failed to create the temporary stderr filename
+71 = The job manager failed on the system call fork()
+72 = The executable file permissions do not allow execution
+73 = The job manager failed to open stdout
+74 = The job manager failed to open stderr
+75 = The cache file could not be opened in order to relocate the user proxy
+76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
+77 = The job manager failed to insert the contact in the client contact list
+78 = The contact was not found in the job manager's client contact list
+79 = Connecting to the job manager failed. Possible reasons: job terminated, invalid job contact, network problems, ...
+
+80 = The syntax of the job contact is invalid
+81 = The executable parameter in the RSL is undefined
+82 = The job manager service is misconfigured. condor arch undefined
+83 = The job manager service is misconfigured. condor os undefined
+84 = The provided RSL 'min_memory' parameter is invalid
+85 = The provided RSL 'max_memory' parameter is invalid
+86 = The RSL 'min_memory' value is not zero or greater
+87 = The RSL 'max_memory' value is not zero or greater
+88 = The creation of a HTTP message failed
+89 = Parsing incoming HTTP message failed
+
+90 = The packing of information into a HTTP message failed
+91 = An incoming HTTP message did not contain the expected information
+92 = The job manager does not support the service that the client requested
+93 = The gatekeeper failed to find the requested service
+94 = The jobmanager does not accept any new requests (shutting down)
+95 = The client failed to close the listener associated with the callback URL
+96 = The gatekeeper contact cannot be parsed
+97 = The job manager could not find the 'poe' command
+98 = The job manager could not find the 'mpirun' command
+99 = The provided RSL 'start_time' parameter is invalid
+100 = The provided RSL 'reservation_handle' parameter is invalid
+
+101 = The provided RSL 'max_wall_time' parameter is invalid
+102 = The RSL 'max_wall_time' value is not zero or greater
+103 = The provided RSL 'max_cpu_time' parameter is invalid
+104 = The RSL 'max_cpu_time' value is not zero or greater
+105 = The job manager is misconfigured, a scheduler script is missing
+106 = The job manager is misconfigured, a scheduler script has invalid permissions
+107 = The job manager failed to signal the job
+108 = The job manager did not recognize/support the signal type
+109 = The job manager failed to get the job id from the local scheduler
+
+110 = The job manager is waiting for a commit signal
+111 = The job manager timed out while waiting for a commit signal
+112 = The provided RSL 'save_state' parameter is invalid
+113 = The provided RSL 'restart' parameter is invalid
+114 = The provided RSL 'two_phase' parameter is invalid
+115 = The RSL 'two_phase' value is not zero or greater
+116 = The provided RSL 'stdout_position' parameter is invalid
+117 = The RSL 'stdout_position' value is not zero or greater
+118 = The provided RSL 'stderr_position' parameter is invalid
+119 = The RSL 'stderr_position' value is not zero or greater
+
+120 = The job manager restart attempt failed
+121 = The job state file doesn't exist
+122 = Could not read the job state file
+123 = Could not write the job state file
+124 = The old job manager is still alive
+125 = The job manager state file TTL expired
+126 = It is unknown if the job was submitted
+127 = The provided RSL 'remote_io_url' parameter is invalid
+128 = Could not write the remote io url file
+129 = The standard output/error size is different
+
+130 = The job manager was sent a stop signal (job is still running)
+131 = The user proxy expired (job is still running)
+132 = The job was not submitted by original jobmanager
+133 = The job manager is not waiting for that commit signal
+134 = The provided RSL scheduler specific parameter is invalid
+135 = The job manager could not stage in a file
+136 = The scratch directory could not be created
+137 = The provided 'gass_cache' parameter is invalid
+138 = The RSL contains attributes which are not valid for job submission
+139 = The RSL contains attributes which are not valid for stdio update
+
+140 = The RSL contains attributes which are not valid for job restart
+141 = The provided RSL 'file_stage_in' parameter is invalid
+142 = The provided RSL 'file_stage_in_shared' parameter is invalid
+143 = The provided RSL 'file_stage_out' parameter is invalid
+144 = The provided RSL 'gass_cache' parameter is invalid
+145 = The provided RSL 'file_cleanup' parameter is invalid
+146 = The provided RSL 'scratch_dir' parameter is invalid
+147 = The provided scheduler-specific RSL parameter is invalid
+148 = A required RSL attribute was not defined in the RSL spec
+149 = The gass_cache attribute points to an invalid cache directory
+
+150 = The provided RSL 'save_state' parameter has an invalid value
+151 = The job manager could not open the RSL attribute validation file
+152 = The job manager could not read the RSL attribute validation file
+153 = The provided RSL 'proxy_timeout' is invalid
+154 = The RSL 'proxy_timeout' value is not greater than zero
+155 = The job manager could not stage out a file
+156 = The job contact string does not match any which the job manager is handling
+157 = Proxy delegation failed
+158 = The job manager could not lock the state lock file
+
+1000 = Failed to start up callback handler
+1003 = Job contact not set
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/resources/service.properties b/modules/gfac/gfac-gram/src/main/resources/service.properties
new file mode 100644
index 0000000..391bfea
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/resources/service.properties
@@ -0,0 +1,58 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+#
+# Class which implemented Scheduler interface. It will be used to determine a Provider
+#
+scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
+
+#
+# Data Service Plugins classes
+#
+datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
+
+#
+# Pre execution Plugins classes. For example, GridFTP Input Staging
+# NOTE(review): java.util.Properties keeps only the last value for a duplicate
+# key, so the HttpInputStaging line overrides GridFtpInputStaging here —
+# confirm whether a comma-separated list was intended.
+#
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
+
+#
+# Post execution Plugins classes. For example, GridFTP Output Staging
+# NOTE(review): duplicate key — only the OutputRegister line takes effect with
+# java.util.Properties; confirm whether a comma-separated list was intended.
+#
+postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
+postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
+
+#
+# SSH private key location. It will be used by SSHProvider
+#
+# ssh.key=/home/user/.ssh/id_rsa
+# ssh.keypass=
+# ssh.username=usernameAtHost
+
+#
+# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
+#
+# myproxy.server=myproxy.teragrid.org
+# myproxy.user=username
+# myproxy.pass=password
+# myproxy.life=3600
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
new file mode 100644
index 0000000..24c364c
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
@@ -0,0 +1,115 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.core.gfac.services.impl;
+
+import junit.framework.Assert;
+import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.common.utils.DatabaseTestCases;
+import org.apache.airavata.common.utils.DerbyUtil;
+import org.apache.airavata.credential.store.store.CredentialReader;
+import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
+import org.apache.airavata.gfac.RequestData;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.log4j.Logger;
+import org.junit.BeforeClass;
+
+/**
+ * User: AmilaJ (amilaj@apache.org)
+ * Date: 7/11/13
+ * Time: 1:31 AM
+ */
+
+public class GFacBaseTestWithMyProxyAuth extends DatabaseTestCases {
+
+    private static String myProxyUserName;
+    private static String myProxyPassword;
+
+    private static final Logger log = Logger.getLogger(GFacBaseTestWithMyProxyAuth.class);
+
+    /**
+     * Reads MyProxy credentials from the myproxy.user / myproxy.password
+     * system properties, fails fast when they are absent, and boots the
+     * embedded Derby credential store.
+     */
+    @BeforeClass
+    public static void setUpClass() throws Exception {
+        AiravataUtils.setExecutionAsServer();
+
+        myProxyUserName = System.getProperty("myproxy.user");
+        myProxyPassword = System.getProperty("myproxy.password");
+
+        // FIX: validate the MyProxy values just read. The original checked the
+        // unrelated userName/password fields inherited from DatabaseTestCases,
+        // so missing myproxy.* properties were never detected here.
+        if (myProxyUserName == null || myProxyPassword == null
+                || myProxyUserName.trim().equals("") || myProxyPassword.trim().equals("")) {
+            log.error("===== Please set myproxy.user and myproxy.password system properties. =======");
+            Assert.fail("Please set myproxy.user and myproxy.password system properties.");
+        }
+
+        log.info("Using my proxy user name - " + myProxyUserName);
+
+        setUpDatabase();
+    }
+
+    /**
+     * Starts Derby in server mode and (re)creates the CREDENTIALS table used
+     * by the credential store.
+     */
+    public static void setUpDatabase() throws Exception {
+        DerbyUtil.startDerbyInServerMode(getHostAddress(), getPort(), getUserName(), getPassword());
+
+        waitTillServerStarts();
+
+        String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n"
+                + " GATEWAY_ID VARCHAR(256) NOT NULL,\n"
+                + " TOKEN_ID VARCHAR(256) NOT NULL,\n"
+                + // Actual token used to identify the credential
+                " CREDENTIAL BLOB NOT NULL,\n" + " PORTAL_USER_ID VARCHAR(256) NOT NULL,\n"
+                + " TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n"
+                + " PRIMARY KEY (GATEWAY_ID, TOKEN_ID)\n" + ")";
+
+        String dropTable = "drop table CREDENTIALS";
+
+        try {
+            executeSQL(dropTable);
+        } catch (Exception ignored) {
+            // Table may not exist on the first run; only the creation below matters.
+        }
+
+        executeSQL(createTable);
+    }
+
+    /**
+     * Builds a GSISecurityContext backed by MyProxy (myproxy.teragrid.org)
+     * using the credentials supplied via system properties.
+     */
+    public GSISecurityContext getSecurityContext() throws Exception {
+        GSISecurityContext.setUpTrustedCertificatePath(System.getProperty("gsi.certificate.path"));
+        RequestData requestData = new RequestData();
+        requestData.setMyProxyServerUrl("myproxy.teragrid.org");
+        requestData.setMyProxyUserName(System.getProperty("myproxy.user"));
+        requestData.setMyProxyPassword(System.getProperty("myproxy.password"));
+        requestData.setMyProxyLifeTime(3600);
+        CredentialReader credentialReader = new CredentialReaderImpl(getDbUtil());
+        return new GSISecurityContext(credentialReader, requestData);
+    }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
new file mode 100644
index 0000000..ffaaaae
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
@@ -0,0 +1,225 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.core.gfac.services.impl;
+
+import java.io.File;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.commons.gfac.type.HostDescription;
+import org.apache.airavata.commons.gfac.type.MappingFactory;
+import org.apache.airavata.commons.gfac.type.ServiceDescription;
+import org.apache.airavata.gfac.GFacConfiguration;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.context.ApplicationContext;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.context.security.GSISecurityContext;
+import org.apache.airavata.gfac.cpi.GFacImpl;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.GlobusHostType;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.InputParameterType;
+import org.apache.airavata.schemas.gfac.JobTypeType;
+import org.apache.airavata.schemas.gfac.OutputParameterType;
+import org.apache.airavata.schemas.gfac.ProjectAccountType;
+import org.apache.airavata.schemas.gfac.QueueType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class GramProviderTestWithMyProxyAuth extends GFacBaseTestWithMyProxyAuth {
+    private JobExecutionContext jobExecutionContext;
+
+    //FIXME: move job properties to configuration file
+    private static final String hostAddress = "trestles.sdsc.edu";
+    private static final String hostName = "trestles";
+    private static final String gridftpAddress = "gsiftp://trestles.sdsc.edu:2811/";
+    private static final String gramAddress = "trestles-login2.sdsc.edu:2119/jobmanager-pbstest2";
+
+    /**
+     * Builds the full JobExecutionContext for a simple "echo" job: host,
+     * application and service descriptions, the GSI security context, and the
+     * in/out message parameters consumed by the GRAM provider.
+     */
+    @Before
+    public void setUp() throws Exception {
+        URL resource = GramProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+        assert resource != null;
+        System.out.println(resource.getFile());
+        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null, null);
+
+        /*
+         * Host
+         */
+        HostDescription host = new HostDescription(GlobusHostType.type);
+        host.getType().setHostAddress(hostAddress);
+        host.getType().setHostName(hostName);
+        ((GlobusHostType) host.getType()).setGlobusGateKeeperEndPointArray(new String[]{gramAddress});
+        ((GlobusHostType) host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
+
+        /*
+         * App
+         */
+        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
+        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
+        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+        name.setStringValue("EchoLocal");
+        app.setApplicationName(name);
+        ProjectAccountType projectAccountType = app.addNewProjectAccount();
+        projectAccountType.setProjectAccountNumber("sds128");
+
+        QueueType queueType = app.addNewQueue();
+        queueType.setQueueName("development");
+
+        app.setCpuCount(1);
+        app.setJobType(JobTypeType.SERIAL);
+        app.setNodeCount(1);
+        app.setProcessorsPerNode(1);
+
+        /*
+         * Use bat file if it is compiled on Windows
+         */
+        app.setExecutableLocation("/bin/echo");
+
+        /*
+         * Default tmp location; made unique per run with a timestamp and UUID.
+         */
+        String tempDir = "/scratch/01437/ogce/test/";
+        String date = (new Date()).toString();
+        date = date.replaceAll(" ", "_");
+        date = date.replaceAll(":", "_");
+
+        tempDir = tempDir + File.separator + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
+
+        System.out.println(tempDir);
+        app.setScratchWorkingDirectory(tempDir);
+        app.setStaticWorkingDirectory(tempDir);
+        app.setInputDataDirectory(tempDir + File.separator + "inputData");
+        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
+        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
+        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
+
+        /*
+         * Service
+         */
+        ServiceDescription serv = new ServiceDescription();
+        serv.getType().setName("SimpleEcho");
+
+        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+
+        InputParameterType input = InputParameterType.Factory.newInstance();
+        input.setParameterName("echo_input");
+        input.setParameterType(StringParameterType.Factory.newInstance());
+        inputList.add(input);
+
+        // FIX: the original configured the "myinput" name and URI type on
+        // 'input' (already added above, clobbering its settings) and then
+        // added the still-empty 'input1'. Configure 'input1' itself.
+        InputParameterType input1 = InputParameterType.Factory.newInstance();
+        input1.setParameterName("myinput");
+        URIParameterType uriType = URIParameterType.Factory.newInstance();
+        uriType.setValue("gsiftp://gridftp1.ls4.tacc.utexas.edu:2811//home1/01437/ogce/gram_20130215.log");
+        input1.setParameterType(uriType);
+        inputList.add(input1);
+
+        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
+
+        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+        OutputParameterType output = OutputParameterType.Factory.newInstance();
+        output.setParameterName("echo_output");
+        output.setParameterType(StringParameterType.Factory.newInstance());
+        outputList.add(output);
+
+        OutputParameterType[] outputParamList = outputList.toArray(new OutputParameterType[outputList.size()]);
+
+        serv.getType().setInputParametersArray(inputParamList);
+        serv.getType().setOutputParametersArray(outputParamList);
+
+        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+        // Adding security context
+        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext());
+        ApplicationContext applicationContext = new ApplicationContext();
+        jobExecutionContext.setApplicationContext(applicationContext);
+        applicationContext.setServiceDescription(serv);
+        applicationContext.setApplicationDeploymentDescription(appDesc);
+        applicationContext.setHostDescription(host);
+
+        MessageContext inMessage = new MessageContext();
+        ActualParameter echo_input = new ActualParameter();
+        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+        inMessage.addParameter("echo_input", echo_input);
+
+        // Extra staging parameters exercised by the provider.
+        ActualParameter copy_input = new ActualParameter();
+        copy_input.getType().changeType(URIParameterType.type);
+        ((URIParameterType) copy_input.getType()).setValue("file:///tmp/tmpstrace");
+
+        ActualParameter outlocation = new ActualParameter();
+        ((StringParameterType) outlocation.getType()).setValue("./outputData/.");
+        inMessage.addParameter("copy_input", copy_input);
+        inMessage.addParameter("outputlocation", outlocation);
+
+        jobExecutionContext.setInMessageContext(inMessage);
+
+        MessageContext outMessage = new MessageContext();
+        ActualParameter echo_out = new ActualParameter();
+        outMessage.addParameter("echo_output", echo_out);
+
+        jobExecutionContext.setOutMessageContext(outMessage);
+    }
+
+    /** Submits the echo job through GFac and checks the echoed output value. */
+    @Test
+    public void testGramProvider() throws GFacException {
+        GFacImpl gFacAPI = new GFacImpl();
+        gFacAPI.submitJob(jobExecutionContext);
+        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+        Assert.assertEquals(MappingFactory.toString((ActualParameter) outMessageContext.getParameter("echo_output")), "hello");
+    }
+
+    /** Smoke test for the inherited JDBC URL helper. */
+    @Test
+    public void testGetJdbcUrl() {
+        System.out.println(getJDBCUrl());
+    }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
new file mode 100644
index 0000000..e749e9c
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/test/resources/PBSTemplate.xslt
@@ -0,0 +1,73 @@
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+ the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+ obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+ in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ ANY ~ KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+ the License. -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
+<xsl:output method="text" />
+<xsl:template match="/ns:JobDescriptor">
+#! /bin/sh
+# PBS batch job script built by Globus job manager
+# <xsl:choose>
+ <xsl:when test="ns:shellName">
+##PBS -S <xsl:value-of select="ns:shellName"/>
+ </xsl:when></xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:queueName">
+#PBS -q <xsl:value-of select="ns:queueName"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:mailOptions">
+#PBS -m <xsl:value-of select="ns:mailOptions"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+<xsl:when test="ns:acountString">
+#PBS -A <xsl:value-of select="ns:acountString"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:maxWallTime">
+#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:standardOutFile">
+#PBS -o <xsl:value-of select="ns:standardOutFile"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:standardOutFile">
+#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
+#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
+<xsl:text>
</xsl:text>
+ </xsl:when>
+ </xsl:choose>
+<xsl:for-each select="ns:exports/ns:name">
+<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>
</xsl:text>
+export<xsl:text> </xsl:text><xsl:value-of select="."/>
+<xsl:text>
</xsl:text>
+</xsl:for-each>
+<xsl:for-each select="ns:preJobCommands/ns:command">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+ </xsl:for-each>
+cd <xsl:text> </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>
</xsl:text>
+ <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
+<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text> </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text> </xsl:text>
+<xsl:for-each select="ns:inputs/ns:input">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+ </xsl:for-each>
+<xsl:for-each select="ns:postJobCommands/ns:command">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+</xsl:for-each>
+
+</xsl:template>
+
+</xsl:stylesheet>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/test/resources/gfac-config.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/gfac-config.xml b/modules/gfac/gfac-gram/src/test/resources/gfac-config.xml
new file mode 100644
index 0000000..85d148d
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/test/resources/gfac-config.xml
@@ -0,0 +1,33 @@
+<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
+ contributor license agreements. See the NOTICE file ~ distributed with this
+ work for additional information ~ regarding copyright ownership. The ASF
+ licenses this file ~ to you under the Apache License, Version 2.0 (the ~
+ "License"); you may not use this file except in compliance ~ with the License.
+ You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~ ~ Unless required by applicable law or agreed to in writing, ~ software
+ distributed under the License is distributed on an ~ "AS IS" BASIS, WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY ~ KIND, either express or implied. See the
+ License for the ~ specific language governing permissions and limitations
+ ~ under the License. -->
+
+<GFac>
+ <GlobalHandlers>
+ <InHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.AppDescriptorCheckHandler">
+ <property name="name" value="value"/>
+ </Handler>
+ </InHandlers>
+ <OutHandlers></OutHandlers>
+ </GlobalHandlers>
+
+
+ <Provider class="org.apache.airavata.gfac.provider.impl.SSHProvider" host="org.apache.airavata.schemas.gfac.impl.SSHHostTypeImpl">
+ <InHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.SSHDirectorySetupHandler"/>
+ <Handler class="org.apache.airavata.gfac.handler.SSHInputHandler"/>
+ </InHandlers>
+ <OutHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.SSHOutputHandler"/>
+ </OutHandlers>
+ </Provider>
+</GFac>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/test/resources/logging.properties b/modules/gfac/gfac-gram/src/test/resources/logging.properties
new file mode 100644
index 0000000..0584d38
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/test/resources/logging.properties
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#default/fallback log4j configuration
+#
+
+# Set root logger level to INFO with appenders A1 and A2.
+log4j.rootLogger=INFO, A1, A2
+
+# A1 is set to be a rolling file appender with default params
+log4j.appender.A1=org.apache.log4j.RollingFileAppender
+log4j.appender.A1.File=target/seclogs.txt
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
+
+# A2 is a console appender
+log4j.appender.A2=org.apache.log4j.ConsoleAppender
+
+# A2 uses PatternLayout.
+log4j.appender.A2.layout=org.apache.log4j.PatternLayout
+log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
+
+log4j.logger.unicore.security=INFO
+
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gsissh/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/pom.xml b/modules/gfac/gfac-gsissh/pom.xml
index 2f67b76..7360743 100644
--- a/modules/gfac/gfac-gsissh/pom.xml
+++ b/modules/gfac/gfac-gsissh/pom.xml
@@ -105,28 +105,4 @@
</dependency>
</dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
- <outputDirectory>target/lib</outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>true</overWriteSnapshots>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
</project>
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-ssh/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/pom.xml b/modules/gfac/gfac-ssh/pom.xml
index d3df765..c463661 100644
--- a/modules/gfac/gfac-ssh/pom.xml
+++ b/modules/gfac/gfac-ssh/pom.xml
@@ -106,27 +106,4 @@
</dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
- <outputDirectory>target/lib</outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>true</overWriteSnapshots>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
</project>
[08/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/URIUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/URIUtils.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/URIUtils.java
new file mode 100644
index 0000000..34d0da1
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/URIUtils.java
@@ -0,0 +1,119 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.commons.httpclient.URI;
+import org.apache.commons.httpclient.URIException;
+import org.apache.commons.httpclient.util.URIUtil;
+
+import java.net.URISyntaxException;
+
+public class URIUtils {
+
+ public static String encodeAll(String uri) throws URIException
+ { // percent-encodes authority, then path, query and fragment of the same URI string
+ String result = encodeAuthority(uri);
+ result = encodePath(result); // was encodePath(uri): silently discarded the encoded authority
+ result = encodeQuery(result);
+ result = encodeFragment(result);
+ return result;
+ }
+
+ public static String encodeAuthority(String uri) throws URIException
+ {
+ int start = uri.indexOf("//");
+ if(start == -1) return uri;
+ start++;
+ int end = uri.indexOf("/",start+1);
+ if(end == -1) end = uri.indexOf("?",start+1);
+ if(end == -1) end = uri.indexOf("#",start+1);
+ if(end == -1) end = uri.length();
+ String before = uri.substring(0, start+1);
+ String authority= uri.substring(start+1,end);
+ String after = uri.substring(end);
+ authority = URIUtil.encode(authority, URI.allowed_authority);
+
+ return before+authority+after;
+ }
+
+ public static String encodePath(String uri) throws URIException
+ {
+ int doubleSlashIndex = uri.indexOf("//");
+ boolean hasAuthority = doubleSlashIndex >= 0;
+ int start = -1;
+ if(hasAuthority)
+ {
+ start = uri.indexOf("/",doubleSlashIndex+2);
+ }
+ else
+ {
+ start = uri.indexOf(":");
+ }
+ if(start == -1) return uri;
+
+ int end = uri.indexOf("?",start+1);
+ if(end == -1) end = uri.indexOf("#",start+1);
+ if(end == -1) end = uri.length();
+ String before = uri.substring(0, start+1);
+ String path= uri.substring(start+1,end);
+ String after = uri.substring(end);
+ path = URIUtil.encode(path, URI.allowed_abs_path);
+ return before+path+after;
+ }
+
+
+ public static String encodeQuery(String uri) throws URIException
+ {
+ int queryStart = uri.indexOf("?");
+ if(queryStart == -1) return uri;
+ int queryEnd = uri.indexOf("#");
+ if(queryEnd == -1) queryEnd = uri.length();
+
+ String beforeQuery = uri.substring(0, queryStart+1);
+ String query = uri.substring(queryStart+1,queryEnd);
+ String afterQuery = uri.substring(queryEnd);
+ query = URIUtil.encode(query, URI.allowed_query);
+ return beforeQuery+query+afterQuery;
+ }
+
+
+ public static String encodeFragment(String uri) throws URIException
+ {
+ int fragmentStart = uri.indexOf("#");
+ if(fragmentStart == -1) return uri;
+
+ String beforeFragment = uri.substring(0, fragmentStart+1);
+ String fragment = uri.substring(fragmentStart+1);
+ fragment = URIUtil.encode(fragment, URI.allowed_fragment);
+ return beforeFragment+fragment;
+ }
+ public static java.net.URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
+ StringBuffer buf = new StringBuffer();
+ if (!host.startsWith("gsiftp://"))
+ buf.append("gsiftp://");
+ buf.append(host);
+ if (!host.endsWith("/"))
+ buf.append("/");
+ buf.append(localPath);
+ return new java.net.URI(buf.toString());
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/resources/errors.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/resources/errors.properties b/modules/gfac/gfac-bes/src/main/resources/errors.properties
new file mode 100644
index 0000000..88c41b8
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/resources/errors.properties
@@ -0,0 +1,197 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Directly copied from jglobus. Not a good way to manager error properties.
+1 = Parameter not supported
+2 = The RSL length is greater than the maximum allowed
+3 = No resources available
+4 = Bad directory specified
+5 = The executable does not exist
+6 = Insufficient funds
+7 = Authentication with the remote server failed
+8 = Job cancelled by user
+9 = Job cancelled by system
+
+10 = Data transfer to the server failed
+11 = The stdin file does not exist
+12 = The connection to the server failed (check host and port)
+13 = The provided RSL 'maxtime' value is invalid (not an integer or must be greater than 0)
+14 = The provided RSL 'count' value is invalid (not an integer or must be greater than 0)
+15 = The job manager received an invalid RSL
+16 = Could not connect to job manager
+17 = The job failed when the job manager attempted to run it
+18 = Paradyn error
+19 = The provided RSL 'jobtype' value is invalid
+
+20 = The provided RSL 'myjob' value is invalid
+21 = The job manager failed to locate an internal script argument file
+22 = The job manager failed to create an internal script argument file
+23 = The job manager detected an invalid job state
+24 = The job manager detected an invalid script response
+25 = The job manager detected an invalid job state
+26 = The provided RSL 'jobtype' value is not supported by this job manager
+27 = Unimplemented
+28 = The job manager failed to create an internal script submission file
+29 = The job manager cannot find the user proxy
+
+30 = The job manager failed to open the user proxy
+31 = The job manager failed to cancel the job as requested
+32 = System memory allocation failed
+33 = The interprocess job communication initialization failed
+34 = The interprocess job communication setup failed
+35 = The provided RSL 'host count' value is invalid
+36 = One of the provided RSL parameters is unsupported
+37 = The provided RSL 'queue' parameter is invalid
+38 = The provided RSL 'project' parameter is invalid
+39 = The provided RSL string includes variables that could not be identified
+
+40 = The provided RSL 'environment' parameter is invalid
+41 = The provided RSL 'dryrun' parameter is invalid
+42 = The provided RSL is invalid (an empty string)
+43 = The job manager failed to stage the executable
+44 = The job manager failed to stage the stdin file
+45 = The requested job manager type is invalid
+46 = The provided RSL 'arguments' parameter is invalid
+47 = The gatekeeper failed to run the job manager
+48 = The provided RSL could not be properly parsed
+49 = There is a version mismatch between GRAM components
+
+50 = The provided RSL 'arguments' parameter is invalid
+51 = The provided RSL 'count' parameter is invalid
+52 = The provided RSL 'directory' parameter is invalid
+53 = The provided RSL 'dryrun' parameter is invalid
+54 = The provided RSL 'environment' parameter is invalid
+55 = The provided RSL 'executable' parameter is invalid
+56 = The provided RSL 'host_count' parameter is invalid
+57 = The provided RSL 'jobtype' parameter is invalid
+58 = The provided RSL 'maxtime' parameter is invalid
+59 = The provided RSL 'myjob' parameter is invalid
+
+60 = The provided RSL 'paradyn' parameter is invalid
+61 = The provided RSL 'project' parameter is invalid
+62 = The provided RSL 'queue' parameter is invalid
+63 = The provided RSL 'stderr' parameter is invalid
+64 = The provided RSL 'stdin' parameter is invalid
+65 = The provided RSL 'stdout' parameter is invalid
+66 = The job manager failed to locate an internal script
+67 = The job manager failed on the system call pipe()
+68 = The job manager failed on the system call fcntl()
+69 = The job manager failed to create the temporary stdout filename
+
+70 = The job manager failed to create the temporary stderr filename
+71 = The job manager failed on the system call fork()
+72 = The executable file permissions do not allow execution
+73 = The job manager failed to open stdout
+74 = The job manager failed to open stderr
+75 = The cache file could not be opened in order to relocate the user proxy
+76 = Cannot access cache files in ~/.globus/.gass_cache, check permissions, quota, and disk space
+77 = The job manager failed to insert the contact in the client contact list
+78 = The contact was not found in the job manager's client contact list
+79 = Connecting to the job manager failed. Possible reasons: job terminated, invalid job contact, network problems, ...
+
+80 = The syntax of the job contact is invalid
+81 = The executable parameter in the RSL is undefined
+82 = The job manager service is misconfigured. condor arch undefined
+83 = The job manager service is misconfigured. condor os undefined
+84 = The provided RSL 'min_memory' parameter is invalid
+85 = The provided RSL 'max_memory' parameter is invalid
+86 = The RSL 'min_memory' value is not zero or greater
+87 = The RSL 'max_memory' value is not zero or greater
+88 = The creation of a HTTP message failed
+89 = Parsing incoming HTTP message failed
+
+90 = The packing of information into a HTTP message failed
+91 = An incoming HTTP message did not contain the expected information
+92 = The job manager does not support the service that the client requested
+93 = The gatekeeper failed to find the requested service
+94 = The jobmanager does not accept any new requests (shutting down)
+95 = The client failed to close the listener associated with the callback URL
+96 = The gatekeeper contact cannot be parsed
+97 = The job manager could not find the 'poe' command
+98 = The job manager could not find the 'mpirun' command
+99 = The provided RSL 'start_time' parameter is invalid
+100 = The provided RSL 'reservation_handle' parameter is invalid
+
+101 = The provided RSL 'max_wall_time' parameter is invalid
+102 = The RSL 'max_wall_time' value is not zero or greater
+103 = The provided RSL 'max_cpu_time' parameter is invalid
+104 = The RSL 'max_cpu_time' value is not zero or greater
+105 = The job manager is misconfigured, a scheduler script is missing
+106 = The job manager is misconfigured, a scheduler script has invalid permissions
+107 = The job manager failed to signal the job
+108 = The job manager did not recognize/support the signal type
+109 = The job manager failed to get the job id from the local scheduler
+
+110 = The job manager is waiting for a commit signal
+111 = The job manager timed out while waiting for a commit signal
+112 = The provided RSL 'save_state' parameter is invalid
+113 = The provided RSL 'restart' parameter is invalid
+114 = The provided RSL 'two_phase' parameter is invalid
+115 = The RSL 'two_phase' value is not zero or greater
+116 = The provided RSL 'stdout_position' parameter is invalid
+117 = The RSL 'stdout_position' value is not zero or greater
+118 = The provided RSL 'stderr_position' parameter is invalid
+119 = The RSL 'stderr_position' value is not zero or greater
+
+120 = The job manager restart attempt failed
+121 = The job state file doesn't exist
+122 = Could not read the job state file
+123 = Could not write the job state file
+124 = The old job manager is still alive
+125 = The job manager state file TTL expired
+126 = It is unknown if the job was submitted
+127 = The provided RSL 'remote_io_url' parameter is invalid
+128 = Could not write the remote io url file
+129 = The standard output/error size is different
+
+130 = The job manager was sent a stop signal (job is still running)
+131 = The user proxy expired (job is still running)
+132 = The job was not submitted by original jobmanager
+133 = The job manager is not waiting for that commit signal
+134 = The provided RSL scheduler specific parameter is invalid
+135 = The job manager could not stage in a file
+136 = The scratch directory could not be created
+137 = The provided 'gass_cache' parameter is invalid
+138 = The RSL contains attributes which are not valid for job submission
+139 = The RSL contains attributes which are not valid for stdio update
+
+140 = The RSL contains attributes which are not valid for job restart
+141 = The provided RSL 'file_stage_in' parameter is invalid
+142 = The provided RSL 'file_stage_in_shared' parameter is invalid
+143 = The provided RSL 'file_stage_out' parameter is invalid
+144 = The provided RSL 'gass_cache' parameter is invalid
+145 = The provided RSL 'file_cleanup' parameter is invalid
+146 = The provided RSL 'scratch_dir' parameter is invalid
+147 = The provided scheduler-specific RSL parameter is invalid
+148 = A required RSL attribute was not defined in the RSL spec
+149 = The gass_cache attribute points to an invalid cache directory
+
+150 = The provided RSL 'save_state' parameter has an invalid value
+151 = The job manager could not open the RSL attribute validation file
+152 = The job manager could not read the RSL attribute validation file
+153 = The provided RSL 'proxy_timeout' is invalid
+154 = The RSL 'proxy_timeout' value is not greater than zero
+155 = The job manager could not stage out a file
+156 = The job contact string does not match any which the job manager is handling
+157 = Proxy delegation failed
+158 = The job manager could not lock the state lock file
+
+1000 = Failed to start up callback handler
+1003 = Job contact not set
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/resources/service.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/resources/service.properties b/modules/gfac/gfac-bes/src/main/resources/service.properties
new file mode 100644
index 0000000..391bfea
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/resources/service.properties
@@ -0,0 +1,58 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+#
+# Class which implemented Scheduler interface. It will be used to determine a Provider
+#
+scheduler.class= org.apache.airavata.core.gfac.scheduler.impl.SchedulerImpl
+
+#
+# Data Service Plugins classes
+#
+datachain.classes= org.apache.airavata.core.gfac.extension.data.RegistryDataService
+
+#
+# Pre execution Plugins classes. For example, GridFTP Input Staging
+#
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.GridFtpInputStaging
+prechain.classes= org.apache.airavata.core.gfac.extension.pre.HttpInputStaging
+
+#
+# Post execution Plugins classes. For example, GridFTP Output Staging
+#
+postchain.classes= org.apache.airavata.core.gfac.extension.post.GridFtpOutputStaging
+postchain.classes= org.apache.airavata.core.gfac.extension.post.OutputRegister
+
+#
+# SSH private key location. It will be used by SSHProvider
+#
+# ssh.key=/home/user/.ssh/id_rsa
+# ssh.keypass=
+# ssh.username=usernameAtHost
+
+#
+# MyProxy credential. It will be used by GridFTP Plugins and GramProvider.
+#
+# myproxy.server=myproxy.teragrid.org
+# myproxy.user=username
+# myproxy.pass=password
+# myproxy.life=3600
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java b/modules/gfac/gfac-bes/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
new file mode 100644
index 0000000..d048f67
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
@@ -0,0 +1,318 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.core.gfac.services.impl;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+import org.apache.airavata.commons.gfac.type.HostDescription;
+import org.apache.airavata.commons.gfac.type.ServiceDescription;
+import org.apache.airavata.gfac.GFacConfiguration;
+import org.apache.airavata.gfac.context.ApplicationContext;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.gfac.utils.JSDLGenerator;
+import org.apache.airavata.gfac.utils.JSDLUtils;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.InputParameterType;
+import org.apache.airavata.schemas.gfac.JobTypeType;
+import org.apache.airavata.schemas.gfac.OutputParameterType;
+import org.apache.airavata.schemas.gfac.ProjectAccountType;
+import org.apache.airavata.schemas.gfac.QueueType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.apache.log4j.PropertyConfigurator;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
+import org.junit.Before;
+import org.junit.Test;
+
+//public class JSDLGeneratorTestWithMyProxyAuth {
+//
+// public static final String[] hostArray = new String[] { "https://zam1161v01.zam.kfa-juelich.de:8002/INTEROP1/services/BESFactory?res=default_bes_factory" };
+// public static final String gridftpAddress = "gsiftp://gridftp.blacklight.psc.teragrid.org:2811";
+// public static final String hostAddress = "zam1161v01.zam.kfa-juelich.de";
+// public static final String hostName = "DEMO-INTEROP-SITE";
+// public static final String scratchDir = "/scratch/msmemon/airavata";
+//
+// protected JobExecutionContext jobExecutionContext;
+//
+//
+// @Test
+// public void testSerialJSDLWithStdout() throws Exception{
+//
+// JobTypeType jobType = JobTypeType.Factory.newInstance();
+// jobType.set(JobTypeType.SERIAL);
+// ApplicationContext appContext = getApplicationContext();
+// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, true));
+// jobExecutionContext.setApplicationContext(appContext);
+//
+// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
+//
+// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
+//
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
+//
+// assertFalse(JSDLUtils.getPOSIXApplication(jobDefDoc.getJobDefinition())==null);
+//
+// assertEquals("jsdl_stdout", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
+//
+// }
+//
+// @Test
+// public void testSerialJSDLWithoutStdout() throws Exception{
+//
+// JobTypeType jobType = JobTypeType.Factory.newInstance();
+// jobType.set(JobTypeType.SERIAL);
+// ApplicationContext appContext = getApplicationContext();
+// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, false));
+// jobExecutionContext.setApplicationContext(appContext);
+//
+// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
+//
+// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
+//
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
+//
+// assertFalse(JSDLUtils.getPOSIXApplication(jobDefDoc.getJobDefinition())==null);
+//
+// assertEquals("stdout", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
+// assertEquals("stderr", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getError().getStringValue().toString());
+//
+// }
+//
+//
+// @Test
+// public void testMPIJSDL() throws Exception{
+//
+// JobTypeType jobType = JobTypeType.Factory.newInstance();
+// jobType.set(JobTypeType.MPI);
+// ApplicationContext appContext = getApplicationContext();
+// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, true));
+// jobExecutionContext.setApplicationContext(appContext);
+//
+// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
+//
+// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
+//
+// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
+//
+// assertEquals("jsdl_stdout", JSDLUtils.getOrCreateSPMDApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
+//
+// assertFalse(JSDLUtils.getSPMDApplication(jobDefDoc.getJobDefinition())==null);
+//
+//
+// }
+//
+// protected GFacConfiguration getGFACConfig() throws Exception{
+// URL resource = BESProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+// System.out.println(resource.getFile());
+// GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null,null);
+// return gFacConfiguration;
+// }
+//
+//
+// protected ApplicationContext getApplicationContext() {
+// ApplicationContext applicationContext = new ApplicationContext();
+// applicationContext.setHostDescription(getHostDesc());
+//
+// applicationContext.setServiceDescription(getServiceDesc());
+// return applicationContext;
+// }
+//
+// protected ApplicationDescription getApplicationDesc(JobTypeType jobType, boolean setOuput) {
+// ApplicationDescription appDesc = new ApplicationDescription(
+// HpcApplicationDeploymentType.type);
+// HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc
+// .getType();
+// ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory
+// .newInstance();
+// name.setStringValue("EchoLocal");
+// app.setApplicationName(name);
+// ProjectAccountType projectAccountType = app.addNewProjectAccount();
+// projectAccountType.setProjectAccountNumber("TG-AST110064");
+//
+// QueueType queueType = app.addNewQueue();
+// queueType.setQueueName("development");
+//
+// app.setCpuCount(1);
+// // TODO: also handle parallel jobs
+// if((jobType.enumValue() == JobTypeType.SERIAL) || (jobType.enumValue() == JobTypeType.SINGLE)) {
+// app.setJobType(JobTypeType.SERIAL);
+// }
+// else if (jobType.enumValue() == JobTypeType.MPI) {
+// app.setJobType(JobTypeType.MPI);
+// }
+// else {
+// app.setJobType(JobTypeType.OPEN_MP);
+// }
+//
+// app.setNodeCount(1);
+// app.setProcessorsPerNode(1);
+//
+// /*
+// * Use bat file if it is compiled on Windows
+// */
+// app.setExecutableLocation("/bin/cat");
+//
+// /*
+// * Default tmp location
+// */
+// String date = (new Date()).toString();
+// date = date.replaceAll(" ", "_");
+// date = date.replaceAll(":", "_");
+//
+// String remoteTempDir = scratchDir + File.separator + "SimpleEcho" + "_" + date + "_"
+// + UUID.randomUUID();
+//
+// System.out.println(remoteTempDir);
+//
+// // no need of these parameters, as unicore manages by itself
+// app.setScratchWorkingDirectory(remoteTempDir);
+// app.setStaticWorkingDirectory(remoteTempDir);
+// app.setInputDataDirectory(remoteTempDir + File.separator + "inputData");
+// app.setOutputDataDirectory(remoteTempDir + File.separator + "outputData");
+//
+// if(setOuput) {
+// app.setStandardOutput(app.getOutputDataDirectory()+"/jsdl_stdout");
+// app.setStandardError(app.getOutputDataDirectory()+"/jsdl_stderr");
+// }
+// return appDesc;
+// }
+//
+// protected HostDescription getHostDesc() {
+// HostDescription host = new HostDescription(UnicoreHostType.type);
+// host.getType().setHostAddress(hostAddress);
+// host.getType().setHostName(hostName);
+// ((UnicoreHostType) host.getType()).setUnicoreBESEndPointArray(hostArray);
+// ((UnicoreHostType) host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
+// return host;
+// }
+//
+// protected ServiceDescription getServiceDesc() {
+// ServiceDescription serv = new ServiceDescription();
+// serv.getType().setName("SimpleCat");
+//
+// List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+// InputParameterType input = InputParameterType.Factory.newInstance();
+// input.setParameterName("echo_input");
+// input.setParameterType(StringParameterType.Factory.newInstance());
+// inputList.add(input);
+// InputParameterType[] inputParamList = inputList
+// .toArray(new InputParameterType[inputList.size()]);
+//
+// List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+// OutputParameterType output = OutputParameterType.Factory.newInstance();
+// output.setParameterName("echo_output");
+// output.setParameterType(StringParameterType.Factory.newInstance());
+// outputList.add(output);
+// OutputParameterType[] outputParamList = outputList
+// .toArray(new OutputParameterType[outputList.size()]);
+//
+// serv.getType().setInputParametersArray(inputParamList);
+// serv.getType().setOutputParametersArray(outputParamList);
+//
+//
+// return serv;
+// }
+//
+// protected MessageContext getInMessageContext() {
+// MessageContext inMessage = new MessageContext();
+//
+// ActualParameter i1 = new ActualParameter();
+// i1.getType().changeType(URIParameterType.type);
+// ((URIParameterType)i1.getType()).setValue("file:///tmp/ifile1");
+// inMessage.addParameter("i1", i1);
+//
+// ActualParameter i2 = new ActualParameter();
+// i2.getType().changeType(URIParameterType.type);
+// ((URIParameterType)i2.getType()).setValue("file:///tmp/ifile2");
+// inMessage.addParameter("i2", i2);
+//
+// ActualParameter i3 = new ActualParameter();
+// i2.getType().changeType(URIParameterType.type);
+// ((URIParameterType)i2.getType()).setValue("///tmp/ifile2");
+// inMessage.addParameter("i3", i2);
+//
+// ActualParameter simpleArg = new ActualParameter();
+// simpleArg.getType().changeType(StringParameterType.type);
+// ((StringParameterType)simpleArg.getType()).setValue("argument1");
+// inMessage.addParameter("a1", simpleArg);
+//
+// ActualParameter nameValueArg = new ActualParameter();
+// nameValueArg.getType().changeType(StringParameterType.type);
+// ((StringParameterType)nameValueArg.getType()).setValue("name1=value1");
+// inMessage.addParameter("nameValueArg", nameValueArg);
+//
+// ActualParameter echo_input = new ActualParameter();
+// ((StringParameterType) echo_input.getType())
+// .setValue("echo_output=hello");
+// inMessage.addParameter("echo_input", echo_input);
+//
+// return inMessage;
+// }
+//
+// protected MessageContext getOutMessageContext() {
+// MessageContext om1 = new MessageContext();
+//
+// // TODO: Aint the output parameters are only the name of the files staged out to the gridftp endpoint?
+// ActualParameter o1 = new ActualParameter();
+// ((StringParameterType) o1.getType())
+// .setValue("tempfile");
+// om1.addParameter("o1", o1);
+//
+// ActualParameter o2 = new ActualParameter();
+// o2.getType().changeType(URIParameterType.type);
+//
+// ((URIParameterType)o2.getType()).setValue("http://path/to/upload");
+// om1.addParameter("o2", o2);
+//
+//
+//
+// return om1;
+// }
+//
+// @Before
+// public void initJobContext() throws Exception {
+// PropertyConfigurator.configure("src/test/resources/logging.properties");
+// jobExecutionContext = new JobExecutionContext(getGFACConfig(), getServiceDesc().getType().getName());
+// jobExecutionContext.setApplicationContext(getApplicationContext());
+// jobExecutionContext.setInMessageContext(getInMessageContext());
+// jobExecutionContext.setOutMessageContext(getOutMessageContext());
+// }
+//
+//
+//}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/test/resources/PBSTemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/test/resources/PBSTemplate.xslt b/modules/gfac/gfac-bes/src/test/resources/PBSTemplate.xslt
new file mode 100644
index 0000000..e749e9c
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/test/resources/PBSTemplate.xslt
@@ -0,0 +1,73 @@
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+ the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+ obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+ in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ ANY ~ KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+ the License. -->
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
+<xsl:output method="text" />
+<xsl:template match="/ns:JobDescriptor">
+#! /bin/sh
+# PBS batch job script built by Globus job manager
+# <xsl:choose>
+ <xsl:when test="ns:shellName">
+##PBS -S <xsl:value-of select="ns:shellName"/>
+ </xsl:when></xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:queueName">
+#PBS -q <xsl:value-of select="ns:queueName"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:mailOptions">
+#PBS -m <xsl:value-of select="ns:mailOptions"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+<xsl:when test="ns:acountString">
+#PBS -A <xsl:value-of select="ns:acountString"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:maxWallTime">
+#PBS -l walltime=<xsl:value-of select="ns:maxWallTime"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:standardOutFile">
+#PBS -o <xsl:value-of select="ns:standardOutFile"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="ns:standardOutFile">
+#PBS -e <xsl:value-of select="ns:standardErrorFile"/>
+ </xsl:when>
+ </xsl:choose>
+ <xsl:choose>
+ <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
+#PBS -l nodes=<xsl:value-of select="ns:nodes"/>:ppn=<xsl:value-of select="ns:processesPerNode"/>
+<xsl:text>
</xsl:text>
+ </xsl:when>
+ </xsl:choose>
+<xsl:for-each select="ns:exports/ns:name">
+<xsl:value-of select="."/>=<xsl:value-of select="./@value"/><xsl:text>
</xsl:text>
+export<xsl:text> </xsl:text><xsl:value-of select="."/>
+<xsl:text>
</xsl:text>
+</xsl:for-each>
+<xsl:for-each select="ns:preJobCommands/ns:command">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+ </xsl:for-each>
+cd <xsl:text> </xsl:text><xsl:value-of select="ns:workingDirectory"/><xsl:text>
</xsl:text>
+ <xsl:choose><xsl:when test="ns:jobSubmitterCommand">
+<xsl:value-of select="ns:jobSubmitterCommand"/><xsl:text> </xsl:text></xsl:when></xsl:choose><xsl:value-of select="ns:executablePath"/><xsl:text> </xsl:text>
+<xsl:for-each select="ns:inputs/ns:input">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+ </xsl:for-each>
+<xsl:for-each select="ns:postJobCommands/ns:command">
+ <xsl:value-of select="."/><xsl:text> </xsl:text>
+</xsl:for-each>
+
+</xsl:template>
+
+</xsl:stylesheet>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/test/resources/gfac-config.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/test/resources/gfac-config.xml b/modules/gfac/gfac-bes/src/test/resources/gfac-config.xml
new file mode 100644
index 0000000..85d148d
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/test/resources/gfac-config.xml
@@ -0,0 +1,33 @@
+<!-- ~ Licensed to the Apache Software Foundation (ASF) under one ~ or more
+ contributor license agreements. See the NOTICE file ~ distributed with this
+ work for additional information ~ regarding copyright ownership. The ASF
+ licenses this file ~ to you under the Apache License, Version 2.0 (the ~
+ "License"); you may not use this file except in compliance ~ with the License.
+ You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~ ~ Unless required by applicable law or agreed to in writing, ~ software
+ distributed under the License is distributed on an ~ "AS IS" BASIS, WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY ~ KIND, either express or implied. See the
+ License for the ~ specific language governing permissions and limitations
+ ~ under the License. -->
+
+<GFac>
+ <GlobalHandlers>
+ <InHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.AppDescriptorCheckHandler">
+ <property name="name" value="value"/>
+ </Handler>
+ </InHandlers>
+ <OutHandlers></OutHandlers>
+ </GlobalHandlers>
+
+
+ <Provider class="org.apache.airavata.gfac.provider.impl.SSHProvider" host="org.apache.airavata.schemas.gfac.impl.SSHHostTypeImpl">
+ <InHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.SSHDirectorySetupHandler"/>
+ <Handler class="org.apache.airavata.gfac.handler.SSHInputHandler"/>
+ </InHandlers>
+ <OutHandlers>
+ <Handler class="org.apache.airavata.gfac.handler.SSHOutputHandler"/>
+ </OutHandlers>
+ </Provider>
+</GFac>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/test/resources/logging.properties
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/test/resources/logging.properties b/modules/gfac/gfac-bes/src/test/resources/logging.properties
new file mode 100644
index 0000000..0584d38
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/test/resources/logging.properties
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#default/fallback log4j configuration
+#
+
+# Set root logger level to WARN and its only appender to A1.
+log4j.rootLogger=INFO, A1, A2
+
+# A1 is set to be a rolling file appender with default params
+log4j.appender.A1=org.apache.log4j.RollingFileAppender
+log4j.appender.A1.File=target/seclogs.txt
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
+
+# A2 is a console appender
+log4j.appender.A2=org.apache.log4j.ConsoleAppender
+
+# A2 uses PatternLayout.
+log4j.appender.A2.layout=org.apache.log4j.PatternLayout
+log4j.appender.A2.layout.ConversionPattern=%d [%t] %-5p %c{1} %x - %m%n
+
+log4j.logger.unicore.security=INFO
+
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/pom.xml b/modules/gfac/gfac-core/pom.xml
index 1ec707e..4762c3e 100644
--- a/modules/gfac/gfac-core/pom.xml
+++ b/modules/gfac/gfac-core/pom.xml
@@ -24,38 +24,6 @@
<url>http://airavata.apache.org/</url>
<dependencies>
-
- <dependency>
- <groupId>org.jglobus</groupId>
- <artifactId>gss</artifactId>
- <version>${jglobus.version}</version>
- </dependency>
- <dependency>
- <groupId>org.jglobus</groupId>
- <artifactId>gram</artifactId>
- <version>${jglobus.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.bouncycastle</groupId>
- <artifactId>bcprov-jdk16</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.bouncycastle</groupId>
- <artifactId>bcprov-jdk16</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jglobus</groupId>
- <artifactId>myproxy</artifactId>
- <version>${jglobus.version}</version>
- </dependency>
- <dependency>
- <groupId>org.jglobus</groupId>
- <artifactId>gridftp</artifactId>
- <version>${jglobus.version}</version>
- </dependency>
-
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
@@ -101,34 +69,6 @@
<version>${project.version}</version>
</dependency>
- <!-- Amazon EC2 Provider -->
- <dependency>
- <groupId>com.amazonaws</groupId>
- <artifactId>aws-java-sdk</artifactId>
- <version>1.3.20</version>
- </dependency>
- <dependency>
- <groupId>sshtools</groupId>
- <artifactId>j2ssh-core</artifactId>
- <version>0.2.9</version>
- </dependency>
- <dependency>
- <groupId>sshtools</groupId>
- <artifactId>j2ssh-common</artifactId>
- <version>0.2.9</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.3</version>
- <type>jar</type>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpcore</artifactId>
- <version>4.3</version>
- <type>jar</type>
- </dependency>
<!-- Test -->
<dependency>
@@ -157,27 +97,12 @@
<artifactId>airavata-server-configuration</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
+ <dependency>
<groupId>org.apache.airavata</groupId>
<artifactId>airavata-client-configuration</artifactId>
<scope>test</scope>
</dependency>
- <!-- Unicore dependencies -->
- <dependency>
- <groupId>eu.unicore</groupId>
- <artifactId>ogsabes-client</artifactId>
- <version>1.7.0-rc</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.santuario</groupId>
- <artifactId>xmlsec</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.bouncycastle</groupId>
- <artifactId>bcprov-jdk16</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
+
<!-- Hadoop provider related dependencies -->
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/GFacConfiguration.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/GFacConfiguration.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/GFacConfiguration.java
index 7df8b73..3ae29d6 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/GFacConfiguration.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/GFacConfiguration.java
@@ -40,7 +40,6 @@ import org.apache.airavata.common.exception.UnspecifiedApplicationSettingsExcept
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.gfac.handler.GFacHandlerConfig;
import org.apache.airavata.gfac.provider.GFacProviderConfig;
-import org.apache.airavata.gfac.utils.GridConfigurationHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.*;
@@ -61,35 +60,6 @@ public class GFacConfiguration {
// the provider
private List<GFacHandlerConfig> outHandlers = new ArrayList<GFacHandlerConfig>();
- private static List<GridConfigurationHandler> gridConfigurationHandlers;
-
- private static String GRID_HANDLERS = "airavata.grid.handlers";
-
- static {
- gridConfigurationHandlers = new ArrayList<GridConfigurationHandler>();
- try {
- String handlerString = ServerSettings.getSetting(GRID_HANDLERS);
- String[] handlers = handlerString.split(",");
- for (String handlerClass : handlers) {
- try {
- @SuppressWarnings("unchecked")
- Class<GridConfigurationHandler> classInstance = (Class<GridConfigurationHandler>) GFacConfiguration.class
- .getClassLoader().loadClass(handlerClass);
- gridConfigurationHandlers.add(classInstance.newInstance());
- } catch (Exception e) {
- log.error("Error while loading Grid Configuration Handler class " + handlerClass, e);
- }
- }
- } catch (UnspecifiedApplicationSettingsException e) {
- //no handlers defined
- } catch (ApplicationSettingsException e1) {
- log.error("Error in reading Configuration handler data!!!", e1);
- }
- }
-
- public static GridConfigurationHandler[] getGridConfigurationHandlers() {
- return gridConfigurationHandlers.toArray(new GridConfigurationHandler[]{});
- }
public GFacConfiguration(AiravataAPI airavataAPI) {
this.airavataAPI = airavataAPI;
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
deleted file mode 100644
index 733ee88..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.context.security;
-
-import java.io.File;
-import java.security.Security;
-import java.security.cert.X509Certificate;
-import java.util.Properties;
-
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.credential.store.credential.Credential;
-import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.gfac.AbstractSecurityContext;
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.RequestData;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.globus.gsi.X509Credential;
-import org.globus.gsi.gssapi.GlobusGSSCredentialImpl;
-import org.globus.gsi.provider.GlobusProvider;
-import org.globus.myproxy.GetParams;
-import org.globus.myproxy.MyProxy;
-import org.globus.myproxy.MyProxyException;
-import org.gridforum.jgss.ExtendedGSSCredential;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Handles GRID related security.
- */
-public class GSISecurityContext extends AbstractSecurityContext {
-
- protected static final Logger log = LoggerFactory.getLogger(GSISecurityContext.class);
- /*
- * context name
- */
- public static final String GSI_SECURITY_CONTEXT = "gsi";
-
- public static int CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90;
-
- private GSSCredential gssCredentials = null;
-
- private Cluster pbsCluster = null;
-
- // Set trusted cert path and add provider
- static {
- Security.addProvider(new GlobusProvider());
- try {
- setUpTrustedCertificatePath();
- } catch (ApplicationSettingsException e) {
- log.error(e.getLocalizedMessage(), e);
- }
- }
-
- public static void setUpTrustedCertificatePath(String trustedCertificatePath) {
-
- File file = new File(trustedCertificatePath);
-
- if (!file.exists() || !file.canRead()) {
- File f = new File(".");
- log.info("Current directory " + f.getAbsolutePath());
- throw new RuntimeException("Cannot read trusted certificate path " + trustedCertificatePath);
- } else {
- System.setProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY, file.getAbsolutePath());
- }
- }
-
- private static void setUpTrustedCertificatePath() throws ApplicationSettingsException {
-
- String trustedCertificatePath = ServerSettings.getSetting(Constants.TRUSTED_CERT_LOCATION);
-
- setUpTrustedCertificatePath(trustedCertificatePath);
- }
-
- /**
- * Gets the trusted certificate path. Trusted certificate path is stored in "X509_CERT_DIR"
- * system property.
- * @return The trusted certificate path as a string.
- */
- public static String getTrustedCertificatePath() {
- return System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY);
- }
-
-
- public GSISecurityContext(CredentialReader credentialReader, RequestData requestData) {
- super(credentialReader, requestData);
- }
-
-
- public GSISecurityContext(Cluster pbsCluster) {
- this.setPbsCluster(pbsCluster);
- }
-
- /**
- * Gets GSSCredentials. The process is as follows;
- * If credentials were queried for the first time create credentials.
- * 1. Try creating credentials using certificates stored in the credential store
- * 2. If 1 fails use user name and password to create credentials
- * If credentials are already created check the remaining life time of the credential. If
- * remaining life time is less than CREDENTIAL_RENEWING_THRESH_HOLD, then renew credentials.
- * @return GSSCredentials to be used.
- * @throws GFacException If an error occurred while creating credentials.
- * @throws ApplicationSettingsException
- */
- public GSSCredential getGssCredentials() throws GFacException, ApplicationSettingsException {
-
- if (gssCredentials == null) {
-
- try {
- gssCredentials = getCredentialsFromStore();
- } catch (Exception e) {
- log.error("An exception occurred while retrieving credentials from the credential store. " +
- "Will continue with my proxy user name and password.", e);
- }
-
- // If store does not have credentials try to get from user name and password
- if (gssCredentials == null) {
- gssCredentials = getDefaultCredentials();
- }
-
- // if still null, throw an exception
- if (gssCredentials == null) {
- throw new GFacException("Unable to retrieve my proxy credentials to continue operation.");
- }
- } else {
- try {
- if (gssCredentials.getRemainingLifetime() < CREDENTIAL_RENEWING_THRESH_HOLD) {
- return renewCredentials();
- }
- } catch (GSSException e) {
- throw new GFacException("Unable to retrieve remaining life time from credentials.", e);
- }
- }
-
- return gssCredentials;
- }
-
- /**
- * Renews credentials. First try to renew credentials as a trusted renewer. If that failed
- * use user name and password to renew credentials.
- * @return Renewed credentials.
- * @throws GFacException If an error occurred while renewing credentials.
- * @throws ApplicationSettingsException
- */
- public GSSCredential renewCredentials() throws GFacException, ApplicationSettingsException {
-
- // First try to renew credentials as a trusted renewer
- try {
- gssCredentials = renewCredentialsAsATrustedHost();
- } catch (Exception e) {
- log.warn("Renewing credentials as a trusted renewer failed", e);
- gssCredentials = getProxyCredentials();
- }
-
- return gssCredentials;
- }
-
- /**
- * Reads the credentials from credential store.
- * @return If token is found in the credential store, will return a valid credential. Else returns null.
- * @throws Exception If an error occurred while retrieving credentials.
- */
- public GSSCredential getCredentialsFromStore() throws Exception {
-
- if (getCredentialReader() == null) {
- return null;
- }
-
- Credential credential = getCredentialReader().getCredential(getRequestData().getGatewayId(),
- getRequestData().getTokenId());
-
- if (credential != null) {
- if (credential instanceof CertificateCredential) {
-
- log.info("Successfully found credentials for token id - " + getRequestData().getTokenId() +
- " gateway id - " + getRequestData().getGatewayId());
-
- CertificateCredential certificateCredential = (CertificateCredential) credential;
-
- X509Certificate[] certificates = certificateCredential.getCertificates();
- X509Credential newCredential = new X509Credential(certificateCredential.getPrivateKey(), certificates);
-
- GlobusGSSCredentialImpl cred = new GlobusGSSCredentialImpl(newCredential, GSSCredential.INITIATE_AND_ACCEPT);
- System.out.print(cred.export(ExtendedGSSCredential.IMPEXP_OPAQUE));
- return cred;
- //return new GlobusGSSCredentialImpl(newCredential,
- // GSSCredential.INITIATE_AND_ACCEPT);
- } else {
- log.info("Credential type is not CertificateCredential. Cannot create mapping globus credentials. " +
- "Credential type - " + credential.getClass().getName());
- }
- } else {
- log.info("Could not find credentials for token - " + getRequestData().getTokenId() + " and "
- + "gateway id - " + getRequestData().getGatewayId());
- }
-
- return null;
- }
-
- /**
- * Gets the default proxy certificate.
- * @return Default my proxy credentials.
- * @throws GFacException If an error occurred while retrieving credentials.
- * @throws ApplicationSettingsException
- */
- public GSSCredential getDefaultCredentials() throws GFacException, ApplicationSettingsException{
- MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
- try {
- return myproxy.get(getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
- getRequestData().getMyProxyLifeTime());
- } catch (MyProxyException e) {
- throw new GFacException("An error occurred while retrieving default security credentials.", e);
- }
- }
-
- /**
- * Gets a new proxy certificate given current credentials.
- * @return The short lived GSSCredentials
- * @throws GFacException If an error is occurred while retrieving credentials.
- * @throws ApplicationSettingsException
- */
- public GSSCredential getProxyCredentials() throws GFacException, ApplicationSettingsException {
-
- MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
- try {
- return myproxy.get(gssCredentials, getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
- getRequestData().getMyProxyLifeTime());
- } catch (MyProxyException e) {
- throw new GFacException("An error occurred while renewing security credentials using user/password.", e);
- }
- }
-
- /**
- * Renew GSSCredentials.
- * Before executing we need to add current host as a trusted renewer. Note to renew credentials
- * we dont need user name and password.
- * To do that execute following command
- * > myproxy-logon -t <LIFETIME></LIFETIME> -s <MY PROXY SERVER> -l <USER NAME>
- * E.g :- > myproxy-logon -t 264 -s myproxy.teragrid.org -l us3
- * Enter MyProxy pass phrase:
- * A credential has been received for user us3 in /tmp/x509up_u501.
- * > myproxy-init -A --cert /tmp/x509up_u501 --key /tmp/x509up_u501 -l ogce -s myproxy.teragrid.org
- * @return Renewed credentials.
- * @throws GFacException If an error occurred while renewing credentials.
- * @throws ApplicationSettingsException
- */
- public GSSCredential renewCredentialsAsATrustedHost() throws GFacException, ApplicationSettingsException {
- MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
- GetParams getParams = new GetParams();
- getParams.setAuthzCreds(gssCredentials);
- getParams.setUserName(getRequestData().getMyProxyUserName());
- getParams.setLifetime(getRequestData().getMyProxyLifeTime());
- try {
- return myproxy.get(gssCredentials, getParams);
- } catch (MyProxyException e) {
- throw new GFacException("An error occurred while renewing security credentials.", e);
- }
- }
-
- public Cluster getPbsCluster() {
- return pbsCluster;
- }
-
- public void setPbsCluster(Cluster pbsCluster) {
- this.pbsCluster = pbsCluster;
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/cpi/GFacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/cpi/GFacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/cpi/GFacImpl.java
index ffd8af8..c09f45a 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/cpi/GFacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/cpi/GFacImpl.java
@@ -28,7 +28,6 @@ import java.util.Map;
import java.util.Properties;
import org.apache.airavata.client.api.AiravataAPI;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.commons.gfac.type.ApplicationDescription;
import org.apache.airavata.commons.gfac.type.HostDescription;
@@ -36,12 +35,10 @@ import org.apache.airavata.commons.gfac.type.ServiceDescription;
import org.apache.airavata.gfac.Constants;
import org.apache.airavata.gfac.GFacConfiguration;
import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.RequestData;
import org.apache.airavata.gfac.Scheduler;
import org.apache.airavata.gfac.context.ApplicationContext;
import org.apache.airavata.gfac.context.JobExecutionContext;
import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
import org.apache.airavata.gfac.handler.GFacHandler;
import org.apache.airavata.gfac.handler.GFacHandlerConfig;
import org.apache.airavata.gfac.handler.GFacHandlerException;
@@ -51,24 +48,11 @@ import org.apache.airavata.gfac.notification.listeners.WorkflowTrackingListener;
import org.apache.airavata.gfac.provider.GFacProvider;
import org.apache.airavata.gfac.scheduler.HostScheduler;
import org.apache.airavata.gfac.utils.GFacUtils;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
import org.apache.airavata.gfac.monitor.AbstractActivityListener;
import org.apache.airavata.gfac.monitor.MonitorManager;
import org.apache.airavata.gfac.monitor.command.ExperimentCancelRequest;
import org.apache.airavata.gfac.monitor.command.TaskCancelRequest;
import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.JobDetails;
import org.apache.airavata.model.workspace.experiment.TaskDetails;
import org.apache.airavata.persistance.registry.jpa.resources.AbstractResource.TaskDetailConstants;
import org.apache.airavata.registry.api.AiravataRegistry2;
@@ -76,13 +60,6 @@ import org.apache.airavata.registry.cpi.DataType;
import org.apache.airavata.registry.cpi.Registry;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants;
-import org.apache.airavata.schemas.gfac.Ec2HostType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.apache.airavata.schemas.wec.SecurityContextDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -202,11 +179,6 @@ public class GFacImpl implements GFac, AbstractActivityListener {
jobExecutionContext.setProperty(Constants.PROP_TOPIC, experimentID);
jobExecutionContext.setExperimentID(experimentID);
- // only in test cases we set the security context outside the gfacimpl, otherwise we setit here
- // but in future we might set multiple security contexts
- if(jobExecutionContext.getAllSecurityContexts().size()==0){
- addSecurityContext(hostDescription, configurationProperties, jobExecutionContext);
- }
return jobExecutionContext;
}
@@ -350,99 +322,7 @@ public class GFacImpl implements GFac, AbstractActivityListener {
}
}
- private void addSecurityContext(HostDescription registeredHost, Properties configurationProperties,
- JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
- RequestData requestData;
- if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType
- || registeredHost.getType() instanceof GsisshHostType) {
-
- //todo implement a way to get credential management service from configurationData
- SecurityContextDocument.SecurityContext.CredentialManagementService credentialManagementService = null;
- GSISecurityContext context = null;
-
- /*
- if (credentialManagementService != null) {
- String gatewayId = credentialManagementService.getGatewayId();
- String tokenId
- = credentialManagementService.getTokenId();
- String portalUser = credentialManagementService.getPortalUser();
-
- requestData = new RequestData(tokenId, portalUser, gatewayId);
- } else {
- requestData = new RequestData("default");
- }
-
- try {
- context = new GSISecurityContext(CredentialReaderFactory.createCredentialStoreReader(), requestData);
- } catch (Exception e) {
- throw new WorkflowException("An error occurred while creating GSI security context", e);
- }
-
- if (registeredHost.getType() instanceof GsisshHostType) {
- GSIAuthenticationInfo authenticationInfo
- = new MyProxyAuthenticationInfo(requestData.getMyProxyUserName(), requestData.getMyProxyPassword(), requestData.getMyProxyServerUrl(),
- requestData.getMyProxyPort(), requestData.getMyProxyLifeTime(), System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY));
- ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), registeredHost.getType().getHostAddress());
-
- Cluster pbsCluster = null;
- try {
- pbsCluster = new PBSCluster(serverInfo, authenticationInfo,
- (((HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath()));
- } catch (SSHApiException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
- context.setPbsCluster(pbsCluster);
- } */
-
- requestData = new RequestData("default");
- try {
- //todo fix this
- context = new GSISecurityContext(null, requestData);
- } catch (Exception e) {
- throw new GFacException("An error occurred while creating GSI security context", e);
- }
- if (registeredHost.getType() instanceof GsisshHostType) {
- GSIAuthenticationInfo authenticationInfo
- = new MyProxyAuthenticationInfo(requestData.getMyProxyUserName(), requestData.getMyProxyPassword(), requestData.getMyProxyServerUrl(),
- requestData.getMyProxyPort(), requestData.getMyProxyLifeTime(), System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY));
- GsisshHostType gsisshHostType = (GsisshHostType)registeredHost.getType();
- ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), registeredHost.getType().getHostAddress(),
- gsisshHostType.getPort());
-
- Cluster pbsCluster = null;
- try {
- JobManagerConfiguration jConfig = null;
- String installedParentPath = ((HpcApplicationDeploymentType)
- jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
- String jobManager = ((GsisshHostType) registeredHost.getType()).getJobManager();
- if (jobManager == null) {
- log.error("No Job Manager is configured, so we are picking pbs as the default job manager");
- jConfig = CommonUtils.getPBSJobManager(installedParentPath);
- } else {
- if (PBS_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
- jConfig = CommonUtils.getPBSJobManager(installedParentPath);
- } else if (SLURM_JOB_MANAGER.equalsIgnoreCase(jobManager)) {
- jConfig = CommonUtils.getSLURMJobManager(installedParentPath);
- } else if(SUN_GRID_ENGINE_JOB_MANAGER.equalsIgnoreCase(jobManager)){
- jConfig = CommonUtils.getSGEJobManager(installedParentPath);
- }
- }
- pbsCluster = new PBSCluster(serverInfo, authenticationInfo, jConfig);
- } catch (SSHApiException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
-
- context.setPbsCluster(pbsCluster);
- }
- jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, context);
- } else if (registeredHost.getType() instanceof Ec2HostType) {
- //todo fixthis amazon securitycontext
-// if (this.configuration.getAmazonSecurityContext() != null) {
-// jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT,
-// this.configuration.getAmazonSecurityContext());
- }
- }
public void setup(Object... configurations) {
for (Object configuration : configurations) {
[04/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/URIUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/URIUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/URIUtils.java
deleted file mode 100644
index 4404a37..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/provider/utils/URIUtils.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.provider.utils;
-
-import org.apache.commons.httpclient.URI;
-import org.apache.commons.httpclient.URIException;
-import org.apache.commons.httpclient.util.URIUtil;
-
-public class URIUtils {
-
- public static String encodeAll(String uri) throws URIException
- {
- String result = encodeAuthority(uri);
- result = encodePath(uri);
- result = encodeQuery(result );
- result = encodeFragment(result );
- return result;
- }
-
- public static String encodeAuthority(String uri) throws URIException
- {
- int start = uri.indexOf("//");
- if(start == -1) return uri;
- start++;
- int end = uri.indexOf("/",start+1);
- if(end == -1) end = uri.indexOf("?",start+1);
- if(end == -1) end = uri.indexOf("#",start+1);
- if(end == -1) end = uri.length();
- String before = uri.substring(0, start+1);
- String authority= uri.substring(start+1,end);
- String after = uri.substring(end);
- authority = URIUtil.encode(authority, URI.allowed_authority);
-
- return before+authority+after;
- }
-
- public static String encodePath(String uri) throws URIException
- {
- int doubleSlashIndex = uri.indexOf("//");
- boolean hasAuthority = doubleSlashIndex >= 0;
- int start = -1;
- if(hasAuthority)
- {
- start = uri.indexOf("/",doubleSlashIndex+2);
- }
- else
- {
- start = uri.indexOf(":");
- }
- if(start == -1) return uri;
-
- int end = uri.indexOf("?",start+1);
- if(end == -1) end = uri.indexOf("#",start+1);
- if(end == -1) end = uri.length();
- String before = uri.substring(0, start+1);
- String path= uri.substring(start+1,end);
- String after = uri.substring(end);
- path = URIUtil.encode(path, URI.allowed_abs_path);
- return before+path+after;
- }
-
-
- public static String encodeQuery(String uri) throws URIException
- {
- int queryStart = uri.indexOf("?");
- if(queryStart == -1) return uri;
- int queryEnd = uri.indexOf("#");
- if(queryEnd == -1) queryEnd = uri.length();
-
- String beforeQuery = uri.substring(0, queryStart+1);
- String query = uri.substring(queryStart+1,queryEnd);
- String afterQuery = uri.substring(queryEnd);
- query = URIUtil.encode(query, URI.allowed_query);
- return beforeQuery+query+afterQuery;
- }
-
-
- public static String encodeFragment(String uri) throws URIException
- {
- int fragmentStart = uri.indexOf("#");
- if(fragmentStart == -1) return uri;
-
- String beforeFragment = uri.substring(0, fragmentStart+1);
- String fragment = uri.substring(fragmentStart+1);
- fragment = URIUtil.encode(fragment, URI.allowed_fragment);
- return beforeFragment+fragment;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GFacUtils.java
index 49ca569..3058909 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GFacUtils.java
@@ -52,7 +52,6 @@ import org.apache.airavata.registry.cpi.CompositeIdentifier;
import org.apache.airavata.registry.cpi.Registry;
import org.apache.airavata.schemas.gfac.*;
import org.apache.axiom.om.OMElement;
-import org.globus.gram.GramJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -127,28 +126,9 @@ public class GFacUtils {
return serviceName + "_" + date + "_" + UUID.randomUUID();
}
- public static URI createGsiftpURI(GridFTPContactInfo host, String localPath) throws URISyntaxException {
- StringBuffer buf = new StringBuffer();
- if (!host.hostName.startsWith("gsiftp://"))
- buf.append("gsiftp://");
- buf.append(host).append(":").append(host.port);
- if (!host.hostName.endsWith("/"))
- buf.append("/");
- buf.append(localPath);
- return new URI(buf.toString());
- }
- public static URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
- StringBuffer buf = new StringBuffer();
- if (!host.startsWith("gsiftp://"))
- buf.append("gsiftp://");
- buf.append(host);
- if (!host.endsWith("/"))
- buf.append("/");
- buf.append(localPath);
- return new URI(buf.toString());
- }
+
public static String createGsiftpURIAsString(String host, String localPath) throws URISyntaxException {
StringBuffer buf = new StringBuffer();
@@ -649,28 +629,7 @@ public class GFacUtils {
throw new GFacException("Error persisting job status" + e.getLocalizedMessage(),e);
}
}
- public static JobState getApplicationJobStatus(int gramStatus) {
- switch (gramStatus) {
- case GramJob.STATUS_UNSUBMITTED:
- return JobState.HELD;
- case GramJob.STATUS_ACTIVE:
- return JobState.ACTIVE;
- case GramJob.STATUS_DONE:
- return JobState.COMPLETE;
- case GramJob.STATUS_FAILED:
- return JobState.FAILED;
- case GramJob.STATUS_PENDING:
- return JobState.QUEUED;
- case GramJob.STATUS_STAGE_IN:
- return JobState.QUEUED;
- case GramJob.STATUS_STAGE_OUT:
- return JobState.COMPLETE;
- case GramJob.STATUS_SUSPENDED:
- return JobState.SUSPENDED;
- default:
- return JobState.UNKNOWN;
- }
- }
+
public static Map<String, Object> getMessageContext(List<DataObjectType> experimentData,
Parameter[] parameters) throws GFacException {
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramJobSubmissionListener.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramJobSubmissionListener.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramJobSubmissionListener.java
deleted file mode 100644
index c8d8de6..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramJobSubmissionListener.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.notification.events.StatusChangeEvent;
-import org.apache.airavata.model.workspace.experiment.JobState;
-import org.globus.gram.GramJob;
-import org.globus.gram.GramJobListener;
-import org.ietf.jgss.GSSCredential;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramJobSubmissionListener implements GramJobListener{
- private final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
- public static final int NO_ERROR = -42;
- public static final int INITIAL_STATUS = -43;
-
- private volatile boolean jobDone = false;
- private volatile int error = NO_ERROR;
- private int currentStatus = INITIAL_STATUS;
-
- private JobExecutionContext context;
- private GramJob job;
-
- public GramJobSubmissionListener(GramJob job, JobExecutionContext context) {
- this.job = job;
- this.context = context;
- }
-
- /**
- * This method is used to block the process until the currentStatus of the job is DONE or FAILED
- */
- public void waitFor() {
- while (!isJobDone()) {
- synchronized (this) {
- try {
- wait();
- } catch (InterruptedException e) {}
- }
- }
- }
-
-
-
- private synchronized boolean isJobDone() {
- return this.jobDone;
- }
-
- private void setStatus(int status, int error) {
- try {
- GFacUtils.updateJobStatus(context.getJobDetails(), GFacUtils.getApplicationJobStatus(status));
- } catch (GFacException e) {
- log.error("Error persisting status" + e.getLocalizedMessage(), e);
- }
- this.currentStatus = status;
- this.error = error;
-
- switch (this.currentStatus) {
- case GramJob.STATUS_FAILED:
- log.info("Job Error Code: " + error);
- this.jobDone = true;
- notifyAll();
- case GramJob.STATUS_DONE:
- this.jobDone = true;
- notifyAll();
- }
-
- }
-
- public synchronized void statusChanged(GramJob job) {
-
- int jobStatus = job.getStatus();
- String jobStatusMessage = "Status of job " + job.getIDAsString() + "is " + job.getStatusAsString();
- /*
- * Notify currentStatus change
- */
- this.context.getNotifier().publish(new StatusChangeEvent(jobStatusMessage));
-
- /*
- * Set new currentStatus if it is jobDone, notify all wait object
- */
- if (currentStatus != jobStatus) {
- currentStatus = jobStatus;
-
- setStatus(job.getStatus(), job.getError());
-
- // Test to see whether we need to renew credentials
- renewCredentials(job);
- }
- }
-
- private void renewCredentials(GramJob job) {
-
- try {
-
- int proxyExpTime = job.getCredentials().getRemainingLifetime();
- if (proxyExpTime < GSISecurityContext.CREDENTIAL_RENEWING_THRESH_HOLD) {
- log.info("Job proxy expired. Trying to renew proxy");
- GSSCredential gssCred = ((GSISecurityContext)context.
- getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).renewCredentials();
- job.renew(gssCred);
- log.info("MyProxy credentials are renewed .");
- }
-
- } catch (Exception e) {
- log.error("An error occurred while trying to renew credentials. Job id " + job.getIDAsString());
- }
-
-
- }
-
- public synchronized int getError() {
- return error;
- }
-
- public synchronized int getCurrentStatus() {
- return currentStatus;
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramProviderUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramProviderUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramProviderUtils.java
deleted file mode 100644
index eaf9a4a..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramProviderUtils.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.globus.gram.GramAttributes;
-import org.globus.gram.GramJob;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramProviderUtils {
- private static final Logger log = LoggerFactory.getLogger(GramJobSubmissionListener.class);
-
- public static GramJob setupEnvironment(JobExecutionContext jobExecutionContext, boolean enableTwoPhase) throws GFacProviderException {
- log.debug("Searching for Gate Keeper");
- try {
- GramAttributes jobAttr = GramRSLGenerator.configureRemoteJob(jobExecutionContext);
- String rsl = jobAttr.toRSL();
-
- if (enableTwoPhase) {
- rsl = rsl + "(twoPhase=yes)";
- }
-
- log.debug("RSL = " + rsl);
- GramJob job = new GramJob(rsl);
- return job;
- } catch (ToolsException te) {
- throw new GFacProviderException(te.getMessage(), te);
- }
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramRSLGenerator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramRSLGenerator.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramRSLGenerator.java
deleted file mode 100644
index 187d8f1..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GramRSLGenerator.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.Constants;
-import org.apache.airavata.gfac.ToolsException;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.provider.GFacProviderException;
-import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.FileArrayType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringArrayType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.globus.gram.GramAttributes;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GramRSLGenerator {
- protected static final Logger log = LoggerFactory.getLogger(GramRSLGenerator.class);
-
- private enum JobType {
- SERIAL, SINGLE, MPI, MULTIPLE, CONDOR
- }
-
- ;
-
- public static GramAttributes configureRemoteJob(JobExecutionContext context) throws ToolsException {
- HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) context.getApplicationContext().getApplicationDeploymentDescription().getType();
- GramAttributes jobAttr = new GramAttributes();
- jobAttr.setExecutable(app.getExecutableLocation());
- jobAttr.setDirectory(app.getStaticWorkingDirectory());
- jobAttr.setStdout(app.getStandardOutput());
- jobAttr.setStderr(app.getStandardError());
- /*
- * The env here contains the env of the host and the application. i.e the env specified in the host description
- * and application description documents
- */
- NameValuePairType[] env = app.getApplicationEnvironmentArray();
- if (env.length != 0) {
- Map<String, String> nv = new HashMap<String, String>();
- for (int i = 0; i < env.length; i++) {
- String key = env[i].getName();
- String value = env[i].getValue();
- nv.put(key, value);
- }
-
- for (Map.Entry<String, String> entry : nv.entrySet()) {
- jobAttr.addEnvVariable(entry.getKey(), entry.getValue());
- }
- }
- jobAttr.addEnvVariable(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
- jobAttr.addEnvVariable(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
-
-
-
- if (app.getStandardInput() != null && !"".equals(app.getStandardInput())) {
- jobAttr.setStdin(app.getStandardInput());
- } else {
- MessageContext input = context.getInMessageContext();;
- Map<String,Object> inputs = input.getParameters();
- Set<String> keys = inputs.keySet();
- for (String paramName : keys ) {
- ActualParameter actualParameter = (ActualParameter) inputs.get(paramName);
- if ("URIArray".equals(actualParameter.getType().getType().toString()) || "StringArray".equals(actualParameter.getType().getType().toString())
- || "FileArray".equals(actualParameter.getType().getType().toString())) {
- String[] values = null;
- if (actualParameter.getType() instanceof URIArrayType) {
- values = ((URIArrayType) actualParameter.getType()).getValueArray();
- } else if (actualParameter.getType() instanceof StringArrayType) {
- values = ((StringArrayType) actualParameter.getType()).getValueArray();
- } else if (actualParameter.getType() instanceof FileArrayType) {
- values = ((FileArrayType) actualParameter.getType()).getValueArray();
- }
- String value = StringUtil.createDelimiteredString(values, " ");
- jobAttr.addArgument(value);
- } else {
- String paramValue = MappingFactory.toString(actualParameter);
- jobAttr.addArgument(paramValue);
- }
- }
- }
- // Using the workflowContext Header values if user provided them in the request and overwrite the default values in DD
- //todo finish the scheduling based on workflow execution context
- TaskDetails taskData = context.getTaskData();
- if(taskData != null && taskData.isSetTaskScheduling()){
- ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
- try {
- int cpuCount = computionnalResource.getTotalCPUCount();
- if(cpuCount>0){
- app.setCpuCount(cpuCount);
- }
- } catch (NullPointerException e) {
- log.debug("No Value sent in WorkflowContextHeader for CPU Count, value in the Deployment Descriptor will be used");
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- int nodeCount = computionnalResource.getNodeCount();
- if(nodeCount>0){
- app.setNodeCount(nodeCount);
- }
- } catch (NullPointerException e) {
- log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- String queueName = computionnalResource.getQueueName();
- if (queueName != null) {
- if(app.getQueue() == null){
- QueueType queueType = app.addNewQueue();
- queueType.setQueueName(queueName);
- }else{
- app.getQueue().setQueueName(queueName);
- }
- }
- } catch (NullPointerException e) {
- log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- try {
- int maxwallTime = computionnalResource.getWallTimeLimit();
- if(maxwallTime>0){
- app.setMaxWallTime(maxwallTime);
- }
- } catch (NullPointerException e) {
- log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
- new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
- }
- }
- if (app.getNodeCount() > 0) {
- jobAttr.set("hostCount", String.valueOf(app.getNodeCount()));
- log.debug("Setting number of Nodes to " + app.getCpuCount());
- }
- if (app.getCpuCount() > 0) {
- log.debug("Setting number of procs to " + app.getCpuCount());
- jobAttr.setNumProcs(app.getCpuCount());
- }
- if (app.getMinMemory() > 0) {
- log.debug("Setting minimum memory to " + app.getMinMemory());
- jobAttr.setMinMemory(app.getMinMemory());
- }
- if (app.getMaxMemory() > 0) {
- log.debug("Setting maximum memory to " + app.getMaxMemory());
- jobAttr.setMaxMemory(app.getMaxMemory());
- }
- if (app.getProjectAccount() != null) {
- if (app.getProjectAccount().getProjectAccountNumber() != null) {
- log.debug("Setting project to " + app.getProjectAccount().getProjectAccountNumber());
- jobAttr.setProject(app.getProjectAccount().getProjectAccountNumber());
- }
- }
- if (app.getQueue() != null) {
- if (app.getQueue().getQueueName() != null) {
- log.debug("Setting job queue to " + app.getQueue().getQueueName());
- jobAttr.setQueue(app.getQueue().getQueueName());
- }
- }
- if (app.getMaxWallTime() > 0) {
- log.debug("Setting max wall clock time to " + app.getMaxWallTime());
-
- jobAttr.setMaxWallTime(app.getMaxWallTime());
- jobAttr.set("proxy_timeout", "1");
- } else {
- jobAttr.setMaxWallTime(30);
- }
- String jobType = JobType.SINGLE.toString();
- if (app.getJobType() != null) {
- jobType = app.getJobType().toString();
- }
- if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) {
- log.debug("Setting job type to single");
- jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
- } if (jobType.equalsIgnoreCase(JobType.SERIAL.toString())) {
- log.debug("Setting job type to single");
- jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
- } else if (jobType.equalsIgnoreCase(JobType.MPI.toString())) {
- log.debug("Setting job type to mpi");
- jobAttr.setJobType(GramAttributes.JOBTYPE_MPI);
- } else if (jobType.equalsIgnoreCase(JobType.MULTIPLE.toString())) {
- log.debug("Setting job type to multiple");
- jobAttr.setJobType(GramAttributes.JOBTYPE_MULTIPLE);
- } else if (jobType.equalsIgnoreCase(JobType.CONDOR.toString())) {
- jobAttr.setJobType(GramAttributes.JOBTYPE_CONDOR);
- }
-
- return jobAttr;
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridConfigurationHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridConfigurationHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridConfigurationHandler.java
deleted file mode 100644
index ed7e57e..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridConfigurationHandler.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-import org.apache.airavata.common.utils.Version;
-import org.globus.ftp.GridFTPClient;
-
-public interface GridConfigurationHandler {
- public static class GridConfigurationHandlerException extends Exception{
- private static final long serialVersionUID = 4009827774771871814L;
-
- public GridConfigurationHandlerException(String message, Throwable e){
- super(message,e);
- }
- }
-
- /**
- * A unique name for this GridConfigruationHandler
- * @return
- */
- public String getHandlerName();
-
- /**
- * The version of this handler.
- * Note: Given multiple versions of the same handler, the latest version will be used.
- * @return
- */
- public Version getHandlerVersion();
-
- /**
- * Gets triggered if any GridFTP tasks other than mentioned below if called. This is there to
- * support future extensions in GridFTP tasks so that the handlers need not be updated necessarily
- * @param client
- * @param taskDescription - a description of the task that is being carried out.
- * @throws Exception
- */
- public void handleFTPClientConfigurations(GridFTPClient client, String taskDescription) throws GridConfigurationHandlerException;
-
- /**
- * Do the configurations required for the source GridFTPClient object
- * @param source - <code>null</code> if the transfer is from the local file-system
- * @param destination - <code>null</code> if the transfer is to the local file-system
- * @throws Exception
- */
- public void handleFileTransferFTPClientConfigurations(GridFTPClient source, GridFTPClient destination) throws GridConfigurationHandlerException;
-
- /**
- * Do the configurations required for the GridFTPClient object which is going to create a directory
- * @param client
- * @throws Exception
- */
- public void handleMakeDirFTPClientConfigurations(GridFTPClient client, String dirPath) throws GridConfigurationHandlerException;
-
- /**
- * Do the configurations required for the GridFTPClient object which is going to list a directory
- * @param client
- * @param dirPath
- * @throws Exception
- */
- public void handleListDirFTPClientConfigurations(GridFTPClient client, String dirPath) throws GridConfigurationHandlerException;
-}
-
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridFTPContactInfo.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridFTPContactInfo.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridFTPContactInfo.java
deleted file mode 100644
index 397f4b8..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/utils/GridFTPContactInfo.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.utils;
-
-import org.apache.airavata.gfac.Constants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GridFTPContactInfo {
- protected final static Logger log = LoggerFactory.getLogger(GridFTPContactInfo.class);
- public String hostName;
- public int port;
-
- public GridFTPContactInfo(String hostName, int port) {
- if (port <= 0 || port == 80) {
- log.debug(hostName + "port recived " + port + " setting it to " + Constants.DEFAULT_GSI_FTP_PORT);
- port = Constants.DEFAULT_GSI_FTP_PORT;
- }
- this.hostName = hostName;
- this.port = port;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj instanceof GridFTPContactInfo) {
- return hostName.equals(((GridFTPContactInfo) obj).hostName) && port == ((GridFTPContactInfo) obj).port;
- } else {
- return false;
- }
- }
-
- @Override
- public int hashCode() {
- return hostName.hashCode();
- }
-
- @Override
- public String toString() {
- StringBuffer buf = new StringBuffer();
- buf.append(hostName).append(":").append(port);
- return buf.toString();
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
deleted file mode 100644
index 1cd8ee6..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GFacBaseTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.core.gfac.services.impl;
-
-import junit.framework.Assert;
-import org.apache.airavata.common.utils.AiravataUtils;
-import org.apache.airavata.common.utils.DatabaseTestCases;
-import org.apache.airavata.common.utils.DerbyUtil;
-import org.apache.airavata.credential.store.store.CredentialReader;
-import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
-import org.apache.airavata.gfac.RequestData;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.log4j.Logger;
-import org.junit.BeforeClass;
-
-/**
- * User: AmilaJ (amilaj@apache.org)
- * Date: 7/11/13
- * Time: 1:31 AM
- */
-
-public class GFacBaseTestWithMyProxyAuth extends DatabaseTestCases {
-
- private static String myProxyUserName;
- private static String myProxyPassword;
-
- private static final Logger log = Logger.getLogger(GFacBaseTestWithMyProxyAuth.class);
-
-
- @BeforeClass
- public static void setUpClass() throws Exception {
- AiravataUtils.setExecutionAsServer();
-
- myProxyUserName = System.getProperty("myproxy.user");
- myProxyPassword = System.getProperty("myproxy.password");
-
- if (userName == null || password == null || userName.trim().equals("") || password.trim().equals("")) {
- log.error("===== Please set myproxy.user and myproxy.password system properties. =======");
- Assert.fail("Please set myproxy.user and myproxy.password system properties.");
- }
-
- log.info("Using my proxy user name - " + userName);
-
- setUpDatabase();
-
- }
-
-
-
-
- public static void setUpDatabase() throws Exception {
- DerbyUtil.startDerbyInServerMode(getHostAddress(), getPort(), getUserName(), getPassword());
-
- waitTillServerStarts();
-
- /*
- * String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n" + " GATEWAY_NAME VARCHAR(256) NOT NULL,\n" +
- * " COMMUNITY_USER_NAME VARCHAR(256) NOT NULL,\n" + " CREDENTIAL BLOB NOT NULL,\n" +
- * " PRIVATE_KEY BLOB NOT NULL,\n" + " NOT_BEFORE VARCHAR(256) NOT NULL,\n" +
- * " NOT_AFTER VARCHAR(256) NOT NULL,\n" + " LIFETIME INTEGER NOT NULL,\n" +
- * " REQUESTING_PORTAL_USER_NAME VARCHAR(256) NOT NULL,\n" +
- * " REQUESTED_TIME TIMESTAMP DEFAULT '0000-00-00 00:00:00',\n" +
- * " PRIMARY KEY (GATEWAY_NAME, COMMUNITY_USER_NAME)\n" + ")";
- */
-
- String createTable = "CREATE TABLE CREDENTIALS\n" + "(\n"
- + " GATEWAY_ID VARCHAR(256) NOT NULL,\n"
- + " TOKEN_ID VARCHAR(256) NOT NULL,\n"
- + // Actual token used to identify the credential
- " CREDENTIAL BLOB NOT NULL,\n" + " PORTAL_USER_ID VARCHAR(256) NOT NULL,\n"
- + " TIME_PERSISTED TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n"
- + " PRIMARY KEY (GATEWAY_ID, TOKEN_ID)\n" + ")";
-
- String dropTable = "drop table CREDENTIALS";
-
- try {
- executeSQL(dropTable);
- } catch (Exception e) {
- }
-
- executeSQL(createTable);
-
- }
-
- public GSISecurityContext getSecurityContext() throws Exception {
- GSISecurityContext.setUpTrustedCertificatePath(System.getProperty("gsi.certificate.path"));
- RequestData requestData = new RequestData();
- requestData.setMyProxyServerUrl("myproxy.teragrid.org");
- requestData.setMyProxyUserName(System.getProperty("myproxy.user"));
- requestData.setMyProxyPassword(System.getProperty("myproxy.password"));
- requestData.setMyProxyLifeTime(3600);
- CredentialReader credentialReader = new CredentialReaderImpl(getDbUtil());
- return new GSISecurityContext(credentialReader, requestData);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
deleted file mode 100644
index ffaaaae..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/GramProviderTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.context.security.GSISecurityContext;
-import org.apache.airavata.gfac.cpi.GFacImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class GramProviderTestWithMyProxyAuth extends GFacBaseTestWithMyProxyAuth {
- private JobExecutionContext jobExecutionContext;
-
-
-// private static final String hostAddress = "blacklight.psc.teragrid.org";
-// private static final String hostName = "Blacklight";
-// private static final String gridftpAddress = "gsiftp://gridftp.blacklight.psc.teragrid.org:2812";
-// private static final String gramAddress = "";
-
- //FIXME: move job properties to configuration file
- private static final String hostAddress = "trestles.sdsc.edu";
- private static final String hostName = "trestles";
- private static final String gridftpAddress = "gsiftp://trestles.sdsc.edu:2811/";
- private static final String gramAddress = "trestles-login2.sdsc.edu:2119/jobmanager-pbstest2";
-
- @Before
- public void setUp() throws Exception {
- URL resource = GramProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
- assert resource != null;
- System.out.println(resource.getFile());
- GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null,null);
-// gFacConfiguration.setMyProxyLifeCycle(3600);
-// gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-// gFacConfiguration.setMyProxyUser("*****");
-// gFacConfiguration.setMyProxyPassphrase("*****");
-// gFacConfiguration.setTrustedCertLocation("./certificates");
-// //have to set InFlwo Handlers and outFlowHandlers
-// gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
-// gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
-
- /*
- * Host
- */
- HostDescription host = new HostDescription(GlobusHostType.type);
- host.getType().setHostAddress(hostAddress);
- host.getType().setHostName(hostName);
- ((GlobusHostType)host.getType()).setGlobusGateKeeperEndPointArray(new String[]{gramAddress});
- ((GlobusHostType)host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
- /*
- * App
- */
- ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
- HpcApplicationDeploymentType app = (HpcApplicationDeploymentType)appDesc.getType();
- ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
- name.setStringValue("EchoLocal");
- app.setApplicationName(name);
- ProjectAccountType projectAccountType = app.addNewProjectAccount();
- projectAccountType.setProjectAccountNumber("sds128");
-
- QueueType queueType = app.addNewQueue();
- queueType.setQueueName("development");
-
- app.setCpuCount(1);
- app.setJobType(JobTypeType.SERIAL);
- app.setNodeCount(1);
- app.setProcessorsPerNode(1);
-
- /*
- * Use bat file if it is compiled on Windows
- */
- app.setExecutableLocation("/bin/echo");
-
- /*
- * Default tmp location
- */
- String tempDir = "/scratch/01437/ogce/test/";
- String date = (new Date()).toString();
- date = date.replaceAll(" ", "_");
- date = date.replaceAll(":", "_");
-
- tempDir = tempDir + File.separator
- + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
- System.out.println(tempDir);
- app.setScratchWorkingDirectory(tempDir);
- app.setStaticWorkingDirectory(tempDir);
- app.setInputDataDirectory(tempDir + File.separator + "inputData");
- app.setOutputDataDirectory(tempDir + File.separator + "outputData");
- app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
- app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-
-
- /*
- * Service
- */
- ServiceDescription serv = new ServiceDescription();
- serv.getType().setName("SimpleEcho");
-
- List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
- InputParameterType input = InputParameterType.Factory.newInstance();
- input.setParameterName("echo_input");
- input.setParameterType(StringParameterType.Factory.newInstance());
- inputList.add(input);
-
- InputParameterType input1 = InputParameterType.Factory.newInstance();
- input.setParameterName("myinput");
- URIParameterType uriType = URIParameterType.Factory.newInstance();
- uriType.setValue("gsiftp://gridftp1.ls4.tacc.utexas.edu:2811//home1/01437/ogce/gram_20130215.log");
- input.setParameterType(uriType);
- inputList.add(input1);
-
-
- InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-
- .size()]);
- List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
- OutputParameterType output = OutputParameterType.Factory.newInstance();
- output.setParameterName("echo_output");
- output.setParameterType(StringParameterType.Factory.newInstance());
- outputList.add(output);
-
- OutputParameterType[] outputParamList = outputList
- .toArray(new OutputParameterType[outputList.size()]);
-
- serv.getType().setInputParametersArray(inputParamList);
- serv.getType().setOutputParametersArray(outputParamList);
-
- jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
- // Adding security context
- jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext());
- ApplicationContext applicationContext = new ApplicationContext();
- jobExecutionContext.setApplicationContext(applicationContext);
- applicationContext.setServiceDescription(serv);
- applicationContext.setApplicationDeploymentDescription(appDesc);
- applicationContext.setHostDescription(host);
-
- MessageContext inMessage = new MessageContext();
- ActualParameter echo_input = new ActualParameter();
- ((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
- inMessage.addParameter("echo_input", echo_input);
-
- // added extra
- ActualParameter copy_input = new ActualParameter();
- copy_input.getType().changeType(URIParameterType.type);
- ((URIParameterType)copy_input.getType()).setValue("file:///tmp/tmpstrace");
-
- ActualParameter outlocation = new ActualParameter();
- ((StringParameterType)outlocation.getType()).setValue("./outputData/.");
- inMessage.addParameter("copy_input", copy_input);
- inMessage.addParameter("outputlocation", outlocation);
-
- // added extra
-
-
-
- jobExecutionContext.setInMessageContext(inMessage);
-
- MessageContext outMessage = new MessageContext();
- ActualParameter echo_out = new ActualParameter();
-// ((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
- outMessage.addParameter("echo_output", echo_out);
-
- jobExecutionContext.setOutMessageContext(outMessage);
-
- }
-
- @Test
- public void testGramProvider() throws GFacException {
- GFacImpl gFacAPI = new GFacImpl();
- gFacAPI.submitJob(jobExecutionContext);
- MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
- Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
- }
-
- @Test
- public void testGetJdbcUrl() {
- System.out.println(getJDBCUrl());
- }
-}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
deleted file mode 100644
index 3351b32..0000000
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/core/gfac/services/impl/JSDLGeneratorTestWithMyProxyAuth.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.core.gfac.services.impl;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.context.ApplicationContext;
-import org.apache.airavata.gfac.context.JobExecutionContext;
-import org.apache.airavata.gfac.context.MessageContext;
-import org.apache.airavata.gfac.provider.utils.JSDLGenerator;
-import org.apache.airavata.gfac.provider.utils.JSDLUtils;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.apache.log4j.PropertyConfigurator;
-import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
-import org.junit.Before;
-import org.junit.Test;
-
-//public class JSDLGeneratorTestWithMyProxyAuth {
-//
-// public static final String[] hostArray = new String[] { "https://zam1161v01.zam.kfa-juelich.de:8002/INTEROP1/services/BESFactory?res=default_bes_factory" };
-// public static final String gridftpAddress = "gsiftp://gridftp.blacklight.psc.teragrid.org:2811";
-// public static final String hostAddress = "zam1161v01.zam.kfa-juelich.de";
-// public static final String hostName = "DEMO-INTEROP-SITE";
-// public static final String scratchDir = "/scratch/msmemon/airavata";
-//
-// protected JobExecutionContext jobExecutionContext;
-//
-//
-// @Test
-// public void testSerialJSDLWithStdout() throws Exception{
-//
-// JobTypeType jobType = JobTypeType.Factory.newInstance();
-// jobType.set(JobTypeType.SERIAL);
-// ApplicationContext appContext = getApplicationContext();
-// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, true));
-// jobExecutionContext.setApplicationContext(appContext);
-//
-// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
-//
-// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
-//
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
-//
-// assertFalse(JSDLUtils.getPOSIXApplication(jobDefDoc.getJobDefinition())==null);
-//
-// assertEquals("jsdl_stdout", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
-//
-// }
-//
-// @Test
-// public void testSerialJSDLWithoutStdout() throws Exception{
-//
-// JobTypeType jobType = JobTypeType.Factory.newInstance();
-// jobType.set(JobTypeType.SERIAL);
-// ApplicationContext appContext = getApplicationContext();
-// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, false));
-// jobExecutionContext.setApplicationContext(appContext);
-//
-// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
-//
-// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
-//
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
-//
-// assertFalse(JSDLUtils.getPOSIXApplication(jobDefDoc.getJobDefinition())==null);
-//
-// assertEquals("stdout", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
-// assertEquals("stderr", JSDLUtils.getOrCreatePOSIXApplication(jobDefDoc.getJobDefinition()).getError().getStringValue().toString());
-//
-// }
-//
-//
-// @Test
-// public void testMPIJSDL() throws Exception{
-//
-// JobTypeType jobType = JobTypeType.Factory.newInstance();
-// jobType.set(JobTypeType.MPI);
-// ApplicationContext appContext = getApplicationContext();
-// appContext.setApplicationDeploymentDescription(getApplicationDesc(jobType, true));
-// jobExecutionContext.setApplicationContext(appContext);
-//
-// JobDefinitionDocument jobDefDoc = JSDLGenerator.buildJSDLInstance(jobExecutionContext);
-//
-// assertTrue (jobDefDoc.getJobDefinition().getJobDescription().getApplication().toString().contains("/bin/cat"));
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getDataStagingArray().length > 2);
-//
-// assertTrue(jobDefDoc.getJobDefinition().getJobDescription().getJobIdentification().getJobProjectArray().length > 0);
-//
-// assertEquals("jsdl_stdout", JSDLUtils.getOrCreateSPMDApplication(jobDefDoc.getJobDefinition()).getOutput().getStringValue().toString());
-//
-// assertFalse(JSDLUtils.getSPMDApplication(jobDefDoc.getJobDefinition())==null);
-//
-//
-// }
-//
-// protected GFacConfiguration getGFACConfig() throws Exception{
-// URL resource = BESProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-// System.out.println(resource.getFile());
-// GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null,null);
-// return gFacConfiguration;
-// }
-//
-//
-// protected ApplicationContext getApplicationContext() {
-// ApplicationContext applicationContext = new ApplicationContext();
-// applicationContext.setHostDescription(getHostDesc());
-//
-// applicationContext.setServiceDescription(getServiceDesc());
-// return applicationContext;
-// }
-//
-// protected ApplicationDescription getApplicationDesc(JobTypeType jobType, boolean setOuput) {
-// ApplicationDescription appDesc = new ApplicationDescription(
-// HpcApplicationDeploymentType.type);
-// HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc
-// .getType();
-// ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory
-// .newInstance();
-// name.setStringValue("EchoLocal");
-// app.setApplicationName(name);
-// ProjectAccountType projectAccountType = app.addNewProjectAccount();
-// projectAccountType.setProjectAccountNumber("TG-AST110064");
-//
-// QueueType queueType = app.addNewQueue();
-// queueType.setQueueName("development");
-//
-// app.setCpuCount(1);
-// // TODO: also handle parallel jobs
-// if((jobType.enumValue() == JobTypeType.SERIAL) || (jobType.enumValue() == JobTypeType.SINGLE)) {
-// app.setJobType(JobTypeType.SERIAL);
-// }
-// else if (jobType.enumValue() == JobTypeType.MPI) {
-// app.setJobType(JobTypeType.MPI);
-// }
-// else {
-// app.setJobType(JobTypeType.OPEN_MP);
-// }
-//
-// app.setNodeCount(1);
-// app.setProcessorsPerNode(1);
-//
-// /*
-// * Use bat file if it is compiled on Windows
-// */
-// app.setExecutableLocation("/bin/cat");
-//
-// /*
-// * Default tmp location
-// */
-// String date = (new Date()).toString();
-// date = date.replaceAll(" ", "_");
-// date = date.replaceAll(":", "_");
-//
-// String remoteTempDir = scratchDir + File.separator + "SimpleEcho" + "_" + date + "_"
-// + UUID.randomUUID();
-//
-// System.out.println(remoteTempDir);
-//
-// // no need of these parameters, as unicore manages by itself
-// app.setScratchWorkingDirectory(remoteTempDir);
-// app.setStaticWorkingDirectory(remoteTempDir);
-// app.setInputDataDirectory(remoteTempDir + File.separator + "inputData");
-// app.setOutputDataDirectory(remoteTempDir + File.separator + "outputData");
-//
-// if(setOuput) {
-// app.setStandardOutput(app.getOutputDataDirectory()+"/jsdl_stdout");
-// app.setStandardError(app.getOutputDataDirectory()+"/jsdl_stderr");
-// }
-// return appDesc;
-// }
-//
-// protected HostDescription getHostDesc() {
-// HostDescription host = new HostDescription(UnicoreHostType.type);
-// host.getType().setHostAddress(hostAddress);
-// host.getType().setHostName(hostName);
-// ((UnicoreHostType) host.getType()).setUnicoreBESEndPointArray(hostArray);
-// ((UnicoreHostType) host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
-// return host;
-// }
-//
-// protected ServiceDescription getServiceDesc() {
-// ServiceDescription serv = new ServiceDescription();
-// serv.getType().setName("SimpleCat");
-//
-// List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-// InputParameterType input = InputParameterType.Factory.newInstance();
-// input.setParameterName("echo_input");
-// input.setParameterType(StringParameterType.Factory.newInstance());
-// inputList.add(input);
-// InputParameterType[] inputParamList = inputList
-// .toArray(new InputParameterType[inputList.size()]);
-//
-// List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-// OutputParameterType output = OutputParameterType.Factory.newInstance();
-// output.setParameterName("echo_output");
-// output.setParameterType(StringParameterType.Factory.newInstance());
-// outputList.add(output);
-// OutputParameterType[] outputParamList = outputList
-// .toArray(new OutputParameterType[outputList.size()]);
-//
-// serv.getType().setInputParametersArray(inputParamList);
-// serv.getType().setOutputParametersArray(outputParamList);
-//
-//
-// return serv;
-// }
-//
-// protected MessageContext getInMessageContext() {
-// MessageContext inMessage = new MessageContext();
-//
-// ActualParameter i1 = new ActualParameter();
-// i1.getType().changeType(URIParameterType.type);
-// ((URIParameterType)i1.getType()).setValue("file:///tmp/ifile1");
-// inMessage.addParameter("i1", i1);
-//
-// ActualParameter i2 = new ActualParameter();
-// i2.getType().changeType(URIParameterType.type);
-// ((URIParameterType)i2.getType()).setValue("file:///tmp/ifile2");
-// inMessage.addParameter("i2", i2);
-//
-// ActualParameter i3 = new ActualParameter();
-// i2.getType().changeType(URIParameterType.type);
-// ((URIParameterType)i2.getType()).setValue("///tmp/ifile2");
-// inMessage.addParameter("i3", i2);
-//
-// ActualParameter simpleArg = new ActualParameter();
-// simpleArg.getType().changeType(StringParameterType.type);
-// ((StringParameterType)simpleArg.getType()).setValue("argument1");
-// inMessage.addParameter("a1", simpleArg);
-//
-// ActualParameter nameValueArg = new ActualParameter();
-// nameValueArg.getType().changeType(StringParameterType.type);
-// ((StringParameterType)nameValueArg.getType()).setValue("name1=value1");
-// inMessage.addParameter("nameValueArg", nameValueArg);
-//
-// ActualParameter echo_input = new ActualParameter();
-// ((StringParameterType) echo_input.getType())
-// .setValue("echo_output=hello");
-// inMessage.addParameter("echo_input", echo_input);
-//
-// return inMessage;
-// }
-//
-// protected MessageContext getOutMessageContext() {
-// MessageContext om1 = new MessageContext();
-//
-// // TODO: Aint the output parameters are only the name of the files staged out to the gridftp endpoint?
-// ActualParameter o1 = new ActualParameter();
-// ((StringParameterType) o1.getType())
-// .setValue("tempfile");
-// om1.addParameter("o1", o1);
-//
-// ActualParameter o2 = new ActualParameter();
-// o2.getType().changeType(URIParameterType.type);
-//
-// ((URIParameterType)o2.getType()).setValue("http://path/to/upload");
-// om1.addParameter("o2", o2);
-//
-//
-//
-// return om1;
-// }
-//
-// @Before
-// public void initJobContext() throws Exception {
-// PropertyConfigurator.configure("src/test/resources/logging.properties");
-// jobExecutionContext = new JobExecutionContext(getGFACConfig(), getServiceDesc().getType().getName());
-// jobExecutionContext.setApplicationContext(getApplicationContext());
-// jobExecutionContext.setInMessageContext(getInMessageContext());
-// jobExecutionContext.setOutMessageContext(getOutMessageContext());
-// }
-//
-//
-//}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/pom.xml b/modules/gfac/gfac-gram/pom.xml
new file mode 100644
index 0000000..47382ae
--- /dev/null
+++ b/modules/gfac/gfac-gram/pom.xml
@@ -0,0 +1,135 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under
+ the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
+ obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to
+ in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+ ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under
+ the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>gfac</artifactId>
+ <version>0.12-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>airavata-gfac-gram</artifactId>
+ <name>Airavata GFac GRAM implementation</name>
+ <description>This is the extension of GFAC to use GRAM</description>
+ <url>http://airavata.apache.org/</url>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gss</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gram</artifactId>
+ <version>${jglobus.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk16</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk16</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>myproxy</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jglobus</groupId>
+ <artifactId>gridftp</artifactId>
+ <version>${jglobus.version}</version>
+ </dependency>
+
+ <!-- Logging -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+
+ <!-- GFAC schemas -->
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-gfac-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- Credential Store -->
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-credential-store</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-server-configuration</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-client-configuration</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+
+ <!-- Test -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <version>6.1.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jcl-over-slf4j</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- gsi-ssh api dependencies -->
+
+ <dependency>
+ <groupId>org.apache.airavata</groupId>
+ <artifactId>airavata-data-models</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.jcraft</groupId>
+ <artifactId>jsch</artifactId>
+ <version>0.1.50</version>
+ </dependency>
+ <dependency>
+ <groupId>org.ogce</groupId>
+ <artifactId>bcgss</artifactId>
+ <version>146</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.xmlbeans</groupId>
+ <artifactId>xmlbeans</artifactId>
+ <version>${xmlbeans.version}</version>
+ </dependency>
+
+ </dependencies>
+</project>
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
new file mode 100644
index 0000000..3eb020f
--- /dev/null
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/context/security/GSISecurityContext.java
@@ -0,0 +1,288 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.context.security;
+
+import org.apache.airavata.common.exception.ApplicationSettingsException;
+import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.credential.store.credential.Credential;
+import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
+import org.apache.airavata.credential.store.store.CredentialReader;
+import org.apache.airavata.gfac.AbstractSecurityContext;
+import org.apache.airavata.gfac.Constants;
+import org.apache.airavata.gfac.GFacException;
+import org.apache.airavata.gfac.RequestData;
+import org.apache.airavata.gsi.ssh.api.Cluster;
+import org.globus.gsi.X509Credential;
+import org.globus.gsi.gssapi.GlobusGSSCredentialImpl;
+import org.globus.gsi.provider.GlobusProvider;
+import org.globus.myproxy.GetParams;
+import org.globus.myproxy.MyProxy;
+import org.globus.myproxy.MyProxyException;
+import org.gridforum.jgss.ExtendedGSSCredential;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.security.Security;
+import java.security.cert.X509Certificate;
+
+/**
+ * Handles GRID related security.
+ */
+public class GSISecurityContext extends AbstractSecurityContext {
+
+ protected static final Logger log = LoggerFactory.getLogger(GSISecurityContext.class);
+ /*
+ * context name
+ */
+ public static final String GSI_SECURITY_CONTEXT = "gsi";
+
+ public static int CREDENTIAL_RENEWING_THRESH_HOLD = 10 * 90;
+
+ private GSSCredential gssCredentials = null;
+
+ private Cluster pbsCluster = null;
+
+ // Set trusted cert path and add provider
+ static {
+ Security.addProvider(new GlobusProvider());
+ try {
+ setUpTrustedCertificatePath();
+ } catch (ApplicationSettingsException e) {
+ log.error(e.getLocalizedMessage(), e);
+ }
+ }
+
+ public static void setUpTrustedCertificatePath(String trustedCertificatePath) {
+
+ File file = new File(trustedCertificatePath);
+
+ if (!file.exists() || !file.canRead()) {
+ File f = new File(".");
+ log.info("Current directory " + f.getAbsolutePath());
+ throw new RuntimeException("Cannot read trusted certificate path " + trustedCertificatePath);
+ } else {
+ System.setProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY, file.getAbsolutePath());
+ }
+ }
+
+ private static void setUpTrustedCertificatePath() throws ApplicationSettingsException {
+
+ String trustedCertificatePath = ServerSettings.getSetting(Constants.TRUSTED_CERT_LOCATION);
+
+ setUpTrustedCertificatePath(trustedCertificatePath);
+ }
+
+ /**
+ * Gets the trusted certificate path. Trusted certificate path is stored in "X509_CERT_DIR"
+ * system property.
+ * @return The trusted certificate path as a string.
+ */
+ public static String getTrustedCertificatePath() {
+ return System.getProperty(Constants.TRUSTED_CERTIFICATE_SYSTEM_PROPERTY);
+ }
+
+
+ public GSISecurityContext(CredentialReader credentialReader, RequestData requestData) {
+ super(credentialReader, requestData);
+ }
+
+
+ public GSISecurityContext(Cluster pbsCluster) {
+ this.setPbsCluster(pbsCluster);
+ }
+
+ /**
+ * Gets GSSCredentials. The process is as follows;
+ * If credentials were queried for the first time create credentials.
+ * 1. Try creating credentials using certificates stored in the credential store
+ * 2. If 1 fails use user name and password to create credentials
+ * If credentials are already created check the remaining life time of the credential. If
+ * remaining life time is less than CREDENTIAL_RENEWING_THRESH_HOLD, then renew credentials.
+ * @return GSSCredentials to be used.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while creating credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getGssCredentials() throws GFacException, ApplicationSettingsException {
+
+ if (gssCredentials == null) {
+
+ try {
+ gssCredentials = getCredentialsFromStore();
+ } catch (Exception e) {
+ log.error("An exception occurred while retrieving credentials from the credential store. " +
+ "Will continue with my proxy user name and password.", e);
+ }
+
+ // If store does not have credentials try to get from user name and password
+ if (gssCredentials == null) {
+ gssCredentials = getDefaultCredentials();
+ }
+
+ // if still null, throw an exception
+ if (gssCredentials == null) {
+ throw new GFacException("Unable to retrieve my proxy credentials to continue operation.");
+ }
+ } else {
+ try {
+ if (gssCredentials.getRemainingLifetime() < CREDENTIAL_RENEWING_THRESH_HOLD) {
+ return renewCredentials();
+ }
+ } catch (GSSException e) {
+ throw new GFacException("Unable to retrieve remaining life time from credentials.", e);
+ }
+ }
+
+ return gssCredentials;
+ }
+
+ /**
+ * Renews credentials. First try to renew credentials as a trusted renewer. If that failed
+ * use user name and password to renew credentials.
+ * @return Renewed credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential renewCredentials() throws GFacException, ApplicationSettingsException {
+
+ // First try to renew credentials as a trusted renewer
+ try {
+ gssCredentials = renewCredentialsAsATrustedHost();
+ } catch (Exception e) {
+ log.warn("Renewing credentials as a trusted renewer failed", e);
+ gssCredentials = getProxyCredentials();
+ }
+
+ return gssCredentials;
+ }
+
+ /**
+ * Reads the credentials from credential store.
+ * @return If token is found in the credential store, will return a valid credential. Else returns null.
+ * @throws Exception If an error occurred while retrieving credentials.
+ */
+ public GSSCredential getCredentialsFromStore() throws Exception {
+
+ if (getCredentialReader() == null) {
+ return null;
+ }
+
+ Credential credential = getCredentialReader().getCredential(getRequestData().getGatewayId(),
+ getRequestData().getTokenId());
+
+ if (credential != null) {
+ if (credential instanceof CertificateCredential) {
+
+ log.info("Successfully found credentials for token id - " + getRequestData().getTokenId() +
+ " gateway id - " + getRequestData().getGatewayId());
+
+ CertificateCredential certificateCredential = (CertificateCredential) credential;
+
+ X509Certificate[] certificates = certificateCredential.getCertificates();
+ X509Credential newCredential = new X509Credential(certificateCredential.getPrivateKey(), certificates);
+
+ GlobusGSSCredentialImpl cred = new GlobusGSSCredentialImpl(newCredential, GSSCredential.INITIATE_AND_ACCEPT);
+ System.out.print(cred.export(ExtendedGSSCredential.IMPEXP_OPAQUE));
+ return cred;
+ //return new GlobusGSSCredentialImpl(newCredential,
+ // GSSCredential.INITIATE_AND_ACCEPT);
+ } else {
+ log.info("Credential type is not CertificateCredential. Cannot create mapping globus credentials. " +
+ "Credential type - " + credential.getClass().getName());
+ }
+ } else {
+ log.info("Could not find credentials for token - " + getRequestData().getTokenId() + " and "
+ + "gateway id - " + getRequestData().getGatewayId());
+ }
+
+ return null;
+ }
+
+ /**
+ * Gets the default proxy certificate.
+ * @return Default my proxy credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while retrieving credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getDefaultCredentials() throws GFacException, ApplicationSettingsException{
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ try {
+ return myproxy.get(getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
+ getRequestData().getMyProxyLifeTime());
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while retrieving default security credentials.", e);
+ }
+ }
+
+ /**
+ * Gets a new proxy certificate given current credentials.
+ * @return The short lived GSSCredentials
+ * @throws org.apache.airavata.gfac.GFacException If an error is occurred while retrieving credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential getProxyCredentials() throws GFacException, ApplicationSettingsException {
+
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ try {
+ return myproxy.get(gssCredentials, getRequestData().getMyProxyUserName(), getRequestData().getMyProxyPassword(),
+ getRequestData().getMyProxyLifeTime());
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while renewing security credentials using user/password.", e);
+ }
+ }
+
+ /**
+ * Renew GSSCredentials.
+ * Before executing we need to add current host as a trusted renewer. Note to renew credentials
+ * we don't need user name and password.
+ * To do that execute following command
+ * > myproxy-logon -t <LIFETIME> -s <MY PROXY SERVER> -l <USER NAME>
+ * E.g :- > myproxy-logon -t 264 -s myproxy.teragrid.org -l us3
+ * Enter MyProxy pass phrase:
+ * A credential has been received for user us3 in /tmp/x509up_u501.
+ * > myproxy-init -A --cert /tmp/x509up_u501 --key /tmp/x509up_u501 -l ogce -s myproxy.teragrid.org
+ * @return Renewed credentials.
+ * @throws org.apache.airavata.gfac.GFacException If an error occurred while renewing credentials.
+ * @throws org.apache.airavata.common.exception.ApplicationSettingsException
+ */
+ public GSSCredential renewCredentialsAsATrustedHost() throws GFacException, ApplicationSettingsException {
+ MyProxy myproxy = new MyProxy(getRequestData().getMyProxyServerUrl(), getRequestData().getMyProxyPort());
+ GetParams getParams = new GetParams();
+ getParams.setAuthzCreds(gssCredentials);
+ getParams.setUserName(getRequestData().getMyProxyUserName());
+ getParams.setLifetime(getRequestData().getMyProxyLifeTime());
+ try {
+ return myproxy.get(gssCredentials, getParams);
+ } catch (MyProxyException e) {
+ throw new GFacException("An error occurred while renewing security credentials.", e);
+ }
+ }
+
+ public Cluster getPbsCluster() {
+ return pbsCluster;
+ }
+
+ public void setPbsCluster(Cluster pbsCluster) {
+ this.pbsCluster = pbsCluster;
+ }
+}
[09/11] creating gfac-bes and gfac-gram out from gfac-core
Posted by la...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileTransferBase.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileTransferBase.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileTransferBase.java
new file mode 100644
index 0000000..114ee57
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileTransferBase.java
@@ -0,0 +1,227 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.regex.Pattern;
+
+import org.unigrids.services.atomic.types.GridFileType;
+import org.unigrids.services.atomic.types.ProtocolType;
+
+import de.fzj.unicore.uas.client.StorageClient;
+import de.fzj.unicore.uas.util.PropertyHelper;
+public class FileTransferBase {
+
+ protected Properties extraParameterSource;
+
+ protected boolean timing=false;
+
+ protected boolean recurse=false;
+
+ protected String from;
+
+ protected String to;
+
+ //index of first byte to download
+ protected Long startByte;
+
+ //index of last byte to download
+ protected Long endByte;
+
+ /**
+ * the creation mode
+ */
+ protected Mode mode;
+
+ /**
+ * whether the job processing should fail if an error occurs
+ */
+ protected boolean failOnError;
+
+ protected List<ProtocolType.Enum> preferredProtocols=new ArrayList<ProtocolType.Enum>();
+
+ public FileTransferBase(){
+ preferredProtocols.add(ProtocolType.BFT);
+ }
+
+ protected Map<String,String>makeExtraParameters(ProtocolType.Enum protocol){
+ Map<String, String> res;
+ if(extraParameterSource==null){
+ res=new HashMap<String, String>();
+ }
+ else{
+ String p=String.valueOf(protocol);
+ PropertyHelper ph=new PropertyHelper(extraParameterSource, new String[]{p,p.toLowerCase()});
+ res= ph.getFilteredMap();
+ }
+ if(res.size()>0){
+ // TODO: change it to logger
+ System.out.println("Have "+res.size()+" extra parameters for protocol "+protocol);
+ }
+ return res;
+ }
+
+
+ public String getTo() {
+ return to;
+ }
+
+ public String getFrom() {
+ return from;
+ }
+
+ public void setTo(String to) {
+ this.to = to;
+ }
+
+ public void setFrom(String from) {
+ this.from = from;
+ }
+
+ public Mode getMode() {
+ return mode;
+ }
+
+ public boolean isFailOnError() {
+ return failOnError;
+ }
+
+ public boolean isTiming() {
+ return timing;
+ }
+
+ public void setTiming(boolean timing) {
+ this.timing = timing;
+ }
+
+ public void setFailOnError(boolean failOnError) {
+ this.failOnError = failOnError;
+ }
+
+ public List<ProtocolType.Enum> getPreferredProtocols() {
+ return preferredProtocols;
+ }
+
+ public void setPreferredProtocols(List<ProtocolType.Enum> preferredProtocols) {
+ this.preferredProtocols = preferredProtocols;
+ }
+
+ public void setExtraParameterSource(Properties properties){
+ this.extraParameterSource=properties;
+ }
+
+ public void setRecurse(boolean recurse) {
+ this.recurse = recurse;
+ }
+ /**
+ * check if the given path denotes a valid remote directory
+ * @param remotePath - the path
+ * @param sms - the storage
+ * @return <code>true</code> if the remote directory exists and is a directory
+ */
+ protected boolean isValidDirectory(String remotePath, StorageClient sms){
+ boolean result=false;
+ if(! ("/".equals(remotePath) || ".".equals(remotePath)) ){
+ try{
+ GridFileType gft=sms.listProperties(remotePath);
+ result=gft.getIsDirectory();
+ }catch(Exception ex){
+ result=false;
+ }
+ }
+ else result=true;
+
+ return result;
+ }
+
+ public File[] resolveWildCards(File original){
+ final String name=original.getName();
+ if(!hasWildCards(original))return new File[]{original};
+ File parent=original.getParentFile();
+ if(parent==null)parent=new File(".");
+ FilenameFilter filter=new FilenameFilter(){
+ Pattern p=createPattern(name);
+ public boolean accept(File file, String name){
+ return p.matcher(name).matches();
+ }
+ };
+ return parent.listFiles(filter);
+ }
+
+ protected boolean hasWildCards(File file){
+ return hasWildCards(file.getName());
+ }
+
+ public boolean hasWildCards(String name){
+ return name.contains("*") || name.contains("?");
+ }
+
+ private Pattern createPattern(String nameWithWildcards){
+ String regex=nameWithWildcards.replace("?",".").replace("*", ".*");
+ return Pattern.compile(regex);
+ }
+
+ protected ProtocolType.Enum chosenProtocol=null;
+
+ public ProtocolType.Enum getChosenProtocol(){
+ return chosenProtocol;
+ }
+
+ public Long getStartByte() {
+ return startByte;
+ }
+
+ public void setStartByte(Long startByte) {
+ this.startByte = startByte;
+ }
+
+ public Long getEndByte() {
+ return endByte;
+ }
+
+ public void setEndByte(Long endByte) {
+ this.endByte = endByte;
+ }
+
+ /**
+ * checks if a byte range is defined
+ * @return <code>true</code> iff both startByte and endByte are defined
+ */
+ protected boolean isRange(){
+ return startByte!=null && endByte!=null;
+ }
+
+ /**
+ * get the number of bytes in the byte range, or "-1" if the range is open-ended
+ * @return
+ */
+ protected long getRangeSize(){
+ if(Long.MAX_VALUE==endByte)return -1;
+ return endByte-startByte;
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileUploader.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileUploader.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileUploader.java
new file mode 100644
index 0000000..4ab1737
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/FileUploader.java
@@ -0,0 +1,245 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Map;
+
+import org.unigrids.services.atomic.types.ProtocolType;
+
+import de.fzj.unicore.uas.client.FileTransferClient;
+import de.fzj.unicore.uas.client.StorageClient;
+import de.fzj.unicore.uas.client.UFTPConstants;
+import de.fzj.unicore.uas.client.UFTPFileTransferClient;
+import de.fzj.unicore.uas.fts.FiletransferOptions.IMonitorable;
+
+/**
+ * upload local file(s) to a remote location
+ *
+ * @author schuller
+ */
+public class FileUploader extends FileTransferBase{
+
+ public FileUploader(String from, String to, Mode mode)throws FileNotFoundException{
+ this(from,to,mode,true);
+ }
+
+ public FileUploader(String from, String to, Mode mode, boolean failOnError)throws FileNotFoundException{
+ this.to=to;
+ this.from=from;
+ this.mode=mode;
+ this.failOnError=failOnError;
+ checkOK();
+ }
+
+ public String getFrom() {
+ return from;
+ }
+
+ public String getTo() {
+ return to;
+ }
+
+
+ public void perform(StorageClient sms)throws Exception{
+ File fileSpec=new File(from);
+ boolean hasWildCards=false;
+ boolean isDirectory=fileSpec.isDirectory();
+ File[] fileset=null;
+
+ if(!isDirectory){
+ hasWildCards=hasWildCards(fileSpec);
+ }
+
+ chosenProtocol=sms.findSupportedProtocol(preferredProtocols.toArray(new ProtocolType.Enum[preferredProtocols.size()]));
+ Map<String,String>extraParameters=makeExtraParameters(chosenProtocol);
+
+ if(!hasWildCards && !isDirectory){
+ //single regular file
+ uploadFile(fileSpec,to,sms,chosenProtocol,extraParameters);
+ return;
+ }
+
+ //handle wildcards or directory
+ if(hasWildCards){
+ fileset=resolveWildCards(fileSpec);
+ }
+ else{
+ fileset=fileSpec.listFiles();
+ }
+
+ if(!isValidDirectory(to, sms)){
+ throw new IOException("The specified remote target '"+to+"' is not a directory");
+ }
+ if(to==null)to="/";
+ String target=isDirectory?to+"/"+fileSpec.getName():to;
+ sms.createDirectory(target);
+ uploadFiles(fileset,target,sms,chosenProtocol,extraParameters);
+ }
+
+ /**
+ * upload a set of files to a remote directory (which must exist)
+ *
+ * @param files
+ * @param remoteDirectory
+ * @param sms
+ * @param protocol
+ * @param extraParameters
+ * @param msg
+ * @throws Exception
+ */
+ private void uploadFiles(File[]files, String remoteDirectory, StorageClient sms, ProtocolType.Enum protocol,
+ Map<String,String>extraParameters)throws Exception{
+ for(File localFile: files){
+ String target=remoteDirectory+"/"+localFile.getName();
+ if(localFile.isDirectory()){
+ if(!recurse){
+ System.out.println("Skipping directory "+localFile.getAbsolutePath());
+ }else{
+ File[] fileset=localFile.listFiles();
+ sms.createDirectory(target);
+ uploadFiles(fileset,target,sms,protocol,extraParameters);
+ }
+ }else{
+ uploadFile(localFile,target,sms,protocol,extraParameters);
+ }
+ }
+ }
+
+ /**
+ * uploads a single regular file
+ *
+ * @param localFile
+ * @param remotePath
+ * @param sms
+ * @param protocol
+ * @param extraParameters
+ * @param msg
+ * @throws Exception
+ */
+ private void uploadFile(File localFile, String remotePath, StorageClient sms, ProtocolType.Enum protocol,
+ Map<String,String>extraParameters) throws Exception{
+ long startTime=System.currentTimeMillis();
+ FileInputStream is=null;
+ FileTransferClient ftc=null;
+ try{
+ if(remotePath==null){
+ remotePath="/"+localFile.getName();
+ }
+ else if(remotePath.endsWith("/")){
+ remotePath+=localFile.getName();
+ }
+ System.out.println("Uploading local file '"+localFile.getAbsolutePath()+"' -> '"+sms.getUrl()+"#"+remotePath+"'");
+ is=new FileInputStream(localFile.getAbsolutePath());
+ boolean append=Mode.append.equals(mode);
+ ftc=sms.getImport(remotePath, append, extraParameters, protocol);
+ configure(ftc, extraParameters);
+ if(append)ftc.setAppend(true);
+ String url=ftc.getUrl();
+ System.out.println("File transfer URL : "+url);
+// ProgressBar p=null;
+ if(ftc instanceof IMonitorable){
+ long size=localFile.length();
+ if(isRange()){
+ size=getRangeSize();
+ }
+// p=new ProgressBar(localFile.getName(),size,msg);
+// ((IMonitorable) ftc).setProgressListener(p);
+ }
+ if(isRange()){
+ System.out.println("Byte range: "+startByte+" - "+(getRangeSize()>0?endByte:""));
+ long skipped=0;
+ while(skipped<startByte){
+ skipped+=is.skip(startByte);
+ }
+ ftc.writeAllData(is, endByte-startByte+1);
+
+ }else{
+ ftc.writeAllData(is);
+ }
+ copyProperties(localFile, sms, remotePath);
+
+// if(ftc instanceof IMonitorable){
+// p.finish();
+// }
+
+ }finally{
+ if(ftc!=null){
+ try{
+ ftc.destroy();
+ }catch(Exception e1){
+// msg.error("Could not clean-up the filetransfer at <"+ftc.getUrl()+">",e1);
+ }
+ }
+ try{ if(is!=null)is.close(); }catch(Exception ignored){}
+ }
+ if(timing){
+ long duration=System.currentTimeMillis()-startTime;
+ double rate=(double)localFile.length()/(double)duration;
+ System.out.println("Rate: "+rate+ " kB/sec.");
+ }
+ }
+
+ /**
+ * if possible, copy the local executable flag to the remote file
+ * @param sourceFile - local file
+ * @throws Exception
+ */
+ private void copyProperties(File sourceFile, StorageClient sms, String target)throws Exception{
+ boolean x=sourceFile.canExecute();
+ try{
+ if(x){
+ sms.changePermissions(target, true, true, x);
+ }
+ }catch(Exception ex){
+//			System.out.println("Can't set executable flag on remote file.",ex);
+ }
+ }
+
+ private void checkOK()throws FileNotFoundException{
+ if(!failOnError){
+ return;
+ }
+ File orig=new File(from);
+ if(!orig.isAbsolute()){
+ orig=new File(System.getProperty("user.dir"),from);
+ }
+ File[] files=resolveWildCards(orig);
+ if(files==null){
+ throw new FileNotFoundException("Local import '"+from+"' does not exist.");
+ }
+ for(File f: files){
+ if(!f.exists())throw new FileNotFoundException("Local import '"+from+"' does not exist.");
+ }
+ }
+
+ private void configure(FileTransferClient ftc, Map<String,String>params){
+ if(ftc instanceof UFTPFileTransferClient){
+ UFTPFileTransferClient u=(UFTPFileTransferClient)ftc;
+ String secret=params.get(UFTPConstants.PARAM_SECRET);
+ u.setSecret(secret);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLGenerator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLGenerator.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLGenerator.java
new file mode 100644
index 0000000..8754b17
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLGenerator.java
@@ -0,0 +1,105 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.airavata.gfac.utils;
+
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.utils.ResourceProcessor;
+import org.apache.airavata.gfac.utils.UASDataStagingProcessor;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *
+ * Utility class generates a JSDL instance from JobExecutionContext instance
+ * @author shahbaz memon
+ *
+ * */
+
+public class JSDLGenerator {
+
+ protected final Logger log = LoggerFactory.getLogger(this.getClass());
+
+
+ public synchronized static JobDefinitionDocument buildJSDLInstance(JobExecutionContext context) throws Exception {
+
+ JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
+ .newInstance();
+ JobDefinitionType value = jobDefDoc.addNewJobDefinition();
+
+ HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+ .getApplicationContext().getApplicationDeploymentDescription()
+ .getType();
+
+ // build Identification
+ createJobIdentification(value, appDepType);
+
+ ResourceProcessor.generateResourceElements(value, context);
+
+ ApplicationProcessor.generateJobSpecificAppElements(value, context);
+
+ DataStagingProcessor.generateDataStagingElements(value, context);
+
+
+ return jobDefDoc;
+ }
+
+
+ public synchronized static JobDefinitionDocument buildJSDLInstance(JobExecutionContext context, String smsUrl) throws Exception {
+
+ JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
+ .newInstance();
+ JobDefinitionType value = jobDefDoc.addNewJobDefinition();
+
+ HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+ .getApplicationContext().getApplicationDeploymentDescription()
+ .getType();
+
+ // build Identification
+ createJobIdentification(value, appDepType);
+
+ ResourceProcessor.generateResourceElements(value, context);
+
+ ApplicationProcessor.generateJobSpecificAppElements(value, context);
+
+ UASDataStagingProcessor.generateDataStagingElements(value, context, smsUrl);
+
+ return jobDefDoc;
+ }
+
+ private static void createJobIdentification(JobDefinitionType value, HpcApplicationDeploymentType appDepType){
+ if( appDepType.getProjectAccount() != null ){
+
+ if (appDepType.getProjectAccount().getProjectAccountNumber() != null)
+ JSDLUtils.addProjectName(value, appDepType.getProjectAccount()
+ .getProjectAccountNumber());
+
+ if (appDepType.getProjectAccount().getProjectAccountDescription() != null)
+ JSDLUtils.getOrCreateJobIdentification(value).setDescription(
+ appDepType.getProjectAccount()
+ .getProjectAccountDescription());
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLUtils.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLUtils.java
new file mode 100644
index 0000000..438fc34
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/JSDLUtils.java
@@ -0,0 +1,540 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+
+import javax.xml.namespace.QName;
+
+import org.apache.commons.httpclient.URIException;
+import org.apache.xmlbeans.XmlCursor;
+import org.apache.xmlbeans.XmlObject;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.BoundaryType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.CPUArchitectureType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.CandidateHostsType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.CreationFlagEnumeration;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.DataStagingType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.ExactType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDescriptionType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobIdentificationType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemTypeEnumeration;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.OperatingSystemTypeType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.ProcessorArchitectureEnumeration;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.ResourcesType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.SourceTargetType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.POSIXApplicationDocument;
+import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.POSIXApplicationType;
+import org.ggf.schemas.jsdl.x2006.x07.jsdlHpcpa.HPCProfileApplicationDocument;
+import org.ggf.schemas.jsdl.x2006.x07.jsdlHpcpa.HPCProfileApplicationType;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.SPMDApplicationDocument;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.SPMDApplicationType;
+
+
+/**
+ *
+ * @author shahbaz memon, bastian demuth
+ *
+ */
+public class JSDLUtils
+{
+
+ public static final int FLAG_OVERWRITE = 1;
+ public static final int FLAG_APPEND = 2;
+ public static final int FLAG_DELETE_ON_TERMINATE = 32;
+
+ public static final QName POSIX_APPLICATION=POSIXApplicationDocument.type.getDocumentElementName();
+
+ public static final QName HPC_PROFILE_APPLICATION=HPCProfileApplicationDocument.type.getDocumentElementName();
+
+ public static final QName SPMD_APPLICATION=SPMDApplicationDocument.type.getDocumentElementName();
+
+ public static final String PROCESSESPERHOST = "ProcessesPerHost";
+ public static final String NUMBEROFPROCESSES = "NumberOfProcesses";
+ public static final String THREADSPERHOST = "ThreadsPerHost";
+
+
+
+ public static EnvironmentType addEnvVariable(JobDefinitionType def,String name, String value) {
+ POSIXApplicationType posixApp = getOrCreatePOSIXApplication(def);
+ EnvironmentType newEnv = posixApp.addNewEnvironment();
+ newEnv.setName(name);
+ newEnv.setStringValue(value);
+ return newEnv;
+ }
+
+ public static void setApplicationName(JobDefinitionType value, String applicationName) {
+ getOrCreateApplication(value).setApplicationName(applicationName);
+ }
+
+ public static void setApplicationVersion(JobDefinitionType value, String applicationVersion) {
+ getOrCreateApplication(value).setApplicationVersion(applicationVersion);
+ }
+
+ public static void addProjectName(JobDefinitionType value, String projectName) {
+ getOrCreateJobIdentification(value).addNewJobProject().setStringValue(projectName);
+ }
+
+ public static void addMultipleProjectNames(JobDefinitionType value, String[] projectNames) {
+ for (String name : projectNames) {
+ getOrCreateJobIdentification(value).addNewJobProject().setStringValue(name);
+ }
+ }
+
+ public static void addCandidateHost(JobDefinitionType value, String host) {
+ getOrCreateCandidateHosts(value).addHostName(host);
+
+ }
+ public static void addDataStagingTargetElement(JobDefinitionType value, String fileSystem, String file, String uri) {
+ addDataStagingTargetElement(value,fileSystem, file, uri, 0);
+ }
+
+ public static void addDataStagingTargetElement(JobDefinitionType value, String fileSystem, String file, String uri, int flags) {
+ JobDescriptionType jobDescr = getOrCreateJobDescription(value);
+ DataStagingType newDS = jobDescr.addNewDataStaging();
+ CreationFlagEnumeration.Enum creationFlag = CreationFlagEnumeration.DONT_OVERWRITE;
+ if((flags & FLAG_OVERWRITE) != 0) creationFlag = CreationFlagEnumeration.OVERWRITE;
+ if((flags & FLAG_APPEND) != 0) creationFlag = CreationFlagEnumeration.APPEND;
+ boolean deleteOnTerminate = (flags & FLAG_DELETE_ON_TERMINATE) != 0;
+ newDS.setCreationFlag(creationFlag);
+ newDS.setDeleteOnTermination(deleteOnTerminate);
+ SourceTargetType target = newDS.addNewTarget();
+
+ try {
+ uri = (uri == null) ? null : URIUtils.encodeAll(uri);
+ } catch (URIException e) {
+ }
+ target.setURI(uri);
+ newDS.setFileName(file);
+ if (fileSystem != null && !fileSystem.equals("Work")) { //$NON-NLS-1$
+ newDS.setFilesystemName(fileSystem);
+ }
+ }
+
+ public static void addDataStagingSourceElement(JobDefinitionType value, String uri, String fileSystem, String file) {
+ addDataStagingSourceElement(value, uri, fileSystem, file, 0);
+ }
+
+ public static void addDataStagingSourceElement(JobDefinitionType value, String uri, String fileSystem, String file, int flags) {
+ JobDescriptionType jobDescr = getOrCreateJobDescription(value);
+
+ try {
+ uri = (uri == null) ? null : URIUtils.encodeAll(uri);
+ } catch (URIException e) {
+ }
+ DataStagingType newDS = jobDescr.addNewDataStaging();
+ CreationFlagEnumeration.Enum creationFlag = CreationFlagEnumeration.DONT_OVERWRITE;
+ if((flags & FLAG_OVERWRITE) != 0) creationFlag = CreationFlagEnumeration.OVERWRITE;
+ if((flags & FLAG_APPEND) != 0) creationFlag = CreationFlagEnumeration.APPEND;
+ boolean deleteOnTerminate = (flags & FLAG_DELETE_ON_TERMINATE) != 0;
+ newDS.setCreationFlag(creationFlag);
+ newDS.setDeleteOnTermination(deleteOnTerminate);
+ SourceTargetType source = newDS.addNewSource();
+ source.setURI(uri);
+ newDS.setFileName(file);
+ if (fileSystem != null && !fileSystem.equals("Work")) { //$NON-NLS-1$
+ newDS.setFilesystemName(fileSystem);
+ }
+ }
+
+
+ public static ApplicationType getOrCreateApplication(JobDefinitionType value) {
+ JobDescriptionType jobDescr = getOrCreateJobDescription(value);
+ if (!jobDescr.isSetApplication()) {
+ jobDescr.addNewApplication();
+ }
+ return jobDescr.getApplication();
+ }
+
+ public static CandidateHostsType getOrCreateCandidateHosts(JobDefinitionType value) {
+ ResourcesType resources = getOrCreateResources(value);
+ if (!resources.isSetCandidateHosts()) {
+ resources.addNewCandidateHosts();
+ }
+ return resources.getCandidateHosts();
+ }
+
+ public static CPUArchitectureType getOrCreateCPUArchitecture(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if (!jobResources.isSetCPUArchitecture()) {
+ jobResources.addNewCPUArchitecture();
+ }
+ return jobResources.getCPUArchitecture();
+ }
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUCount(JobDefinitionType value) {
+ ResourcesType jobResources = getOrCreateResources(value);
+ if (!jobResources.isSetIndividualCPUCount()) {
+ jobResources.addNewIndividualCPUCount();
+ }
+ return jobResources.getIndividualCPUCount();
+ }
+
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUSpeed(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if (!jobResources.isSetIndividualCPUSpeed()) {
+ jobResources.addNewIndividualCPUSpeed();
+ }
+ return jobResources.getIndividualCPUSpeed();
+ }
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualCPUTime(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if ( !jobResources.isSetIndividualCPUTime() ) {
+ jobResources.addNewIndividualCPUTime();
+ }
+ return jobResources.getIndividualCPUTime();
+ }
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualDiskSpace(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if (!jobResources.isSetIndividualDiskSpace()) {
+ jobResources.addNewIndividualDiskSpace();
+ }
+ return jobResources.getIndividualDiskSpace();
+ }
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateIndividualPhysicalMemory(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if (!jobResources.isSetIndividualPhysicalMemory()) {
+ jobResources.addNewIndividualPhysicalMemory();
+ }
+ return jobResources.getIndividualPhysicalMemory();
+ }
+
+ public static JobDescriptionType getOrCreateJobDescription(JobDefinitionType value) {
+ if (value.getJobDescription() == null) {
+ return value.addNewJobDescription();
+ }
+ return value.getJobDescription();
+ }
+
+ public static JobIdentificationType getOrCreateJobIdentification(JobDefinitionType value) {
+ JobDescriptionType descr = getOrCreateJobDescription(value);
+ if (descr.getJobIdentification() == null) {
+ return descr.addNewJobIdentification();
+ }
+ return descr.getJobIdentification();
+ }
+
+ public static OperatingSystemType getOrCreateOperatingSystem(JobDefinitionType value)
+ {
+ ResourcesType jobResources = getOrCreateResources(value);
+ if(!jobResources.isSetOperatingSystem()) {
+ jobResources.addNewOperatingSystem();
+ }
+ return jobResources.getOperatingSystem();
+ }
+
+ public static ResourcesType getOrCreateResources(JobDefinitionType value) {
+ JobDescriptionType jobDescr = getOrCreateJobDescription(value);
+ if (!jobDescr.isSetResources()) {
+ jobDescr.addNewResources();
+ }
+ return jobDescr.getResources();
+ }
+
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateTotalCPUCount(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if ( !jobResources.isSetTotalCPUCount() ) {
+ jobResources.addNewTotalCPUCount();
+ }
+ return jobResources.getTotalCPUCount();
+ }
+
+
+ public static org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType getOrCreateTotalResourceCount(JobDefinitionType value) {
+
+ ResourcesType jobResources = getOrCreateResources(value);
+ if ( !jobResources.isSetTotalResourceCount())
+ {
+ jobResources.addNewTotalResourceCount();
+ }
+ return jobResources.getTotalResourceCount();
+ }
+
+ public static POSIXApplicationType getOrCreatePOSIXApplication(JobDefinitionType value) {
+
+ ApplicationType application = getOrCreateApplication(value);
+
+ if(getHPCProfileApplication(value) != null){
+ //TODO handle: not creating POSIX element if HPCProfile already exists
+ return getPOSIXApplication(value);
+ }
+
+ if (getPOSIXApplication(value) == null) {
+ XmlCursor acursor = application.newCursor();
+ acursor.toEndToken();
+ acursor.insertElement(POSIX_APPLICATION);
+ acursor.dispose();
+ }
+ return getPOSIXApplication(value);
+ }
+
+
+ public static SPMDApplicationType getOrCreateSPMDApplication(JobDefinitionType value) {
+
+ ApplicationType application = getOrCreateApplication(value);
+
+ if (getSPMDApplication(value) == null) {
+ XmlCursor acursor = application.newCursor();
+ acursor.toEndToken();
+ acursor.insertElement(SPMD_APPLICATION);
+ acursor.dispose();
+ }
+ return getSPMDApplication(value);
+ }
+
+ public static SPMDApplicationType getSPMDApplication(JobDefinitionType value) {
+ if (value != null &&
+ value.getJobDescription() != null &&
+ value.getJobDescription().isSetApplication() ) {
+ XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
+ if (acursor.toFirstChild()) {
+ do {
+ if(acursor.getName().equals(SPMD_APPLICATION)) {
+ XmlObject result = acursor.getObject();
+ acursor.dispose();
+ return (SPMDApplicationType) result;
+ }
+ } while (acursor.toNextSibling());
+ acursor.dispose();
+ return null;
+ } else {
+ acursor.dispose();
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+
+
+
+ public static POSIXApplicationType getPOSIXApplication(JobDefinitionType value) {
+ if (value != null &&
+ value.getJobDescription() != null &&
+ value.getJobDescription().isSetApplication() ) {
+ XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
+ if (acursor.toFirstChild()) {
+ do {
+ if(acursor.getName().equals(POSIX_APPLICATION)) {
+ XmlObject result = acursor.getObject();
+ acursor.dispose();
+ return (POSIXApplicationType) result;
+ }
+ } while (acursor.toNextSibling());
+ acursor.dispose();
+ return null;
+ } else {
+ acursor.dispose();
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+
+
+
+ public static HPCProfileApplicationType getOrCreateHPCProfileApplication(JobDefinitionType value) {
+
+ ApplicationType application = getOrCreateApplication(value);
+
+ if(getPOSIXApplication(value) != null){
+ //TODO handle: creating HPC element if POSIX already exists
+ return getHPCProfileApplication(value);
+ }
+
+ if (getHPCProfileApplication(value) == null) {
+ XmlCursor acursor = application.newCursor();
+ acursor.toEndToken();
+ acursor.insertElement(HPC_PROFILE_APPLICATION);
+ acursor.dispose();
+ }
+ return getHPCProfileApplication(value);
+ }
+
+
+ public static HPCProfileApplicationType getHPCProfileApplication(JobDefinitionType value) {
+ if (value != null &&
+ value.getJobDescription() != null &&
+ value.getJobDescription().isSetApplication() ) {
+ XmlCursor acursor = value.getJobDescription().getApplication().newCursor();
+ if (acursor.toFirstChild()) {
+ do {
+ if(acursor.getName().equals(HPC_PROFILE_APPLICATION)) {
+ XmlObject result = acursor.getObject();
+ acursor.dispose();
+ return (HPCProfileApplicationType) result;
+ }
+ } while (acursor.toNextSibling());
+ acursor.dispose();
+ return null;
+ } else {
+ acursor.dispose();
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+
+
+
+
+ public static RangeValueType getTotalCPUCountRequirements(JobDefinitionType value) {
+ if(value != null && value.getJobDescription() != null && value.getJobDescription().isSetResources() &&
+ value.getJobDescription().getResources().isSetTotalCPUCount()){
+ return toU6RangeValue(value.getJobDescription().getResources().getTotalCPUCount());
+ }
+ else
+ return null;
+ }
+
+ public static RangeValueType getTotalResourceCountRequirements(JobDefinitionType value) {
+ if(value != null && value.getJobDescription() != null && value.getJobDescription().isSetResources() &&
+ value.getJobDescription().getResources().isSetTotalResourceCount()){
+ return toU6RangeValue(value.getJobDescription().getResources().getTotalResourceCount());
+ }
+ else
+ return null;
+ }
+
+
+ public static RangeValueType toU6RangeValue(org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType jsdlType) {
+ RangeValueType result = new RangeValueType();
+ if(jsdlType.getExactArray().length > 0){
+ result.setExact(jsdlType.getExactArray(0).getDoubleValue());
+ }
+ if(jsdlType.isSetLowerBoundedRange()){
+ result.setLowerBound(jsdlType.getLowerBoundedRange().getDoubleValue());
+ }
+ if(jsdlType.isSetUpperBoundedRange()){
+ result.setUpperBound(jsdlType.getUpperBoundedRange().getDoubleValue());
+ }
+ return result;
+ }
+
+
+
+ public static void setCPUArchitectureRequirements(JobDefinitionType value, ProcessorRequirement cpuArchitecture) {
+ if(cpuArchitecture == null || cpuArchitecture.getValue() == null) return;
+ CPUArchitectureType cpuArch = getOrCreateCPUArchitecture(value);
+ cpuArch.setCPUArchitectureName(ProcessorArchitectureEnumeration.Enum.forString(cpuArchitecture.getValue()));
+ }
+
+ public static void setIndividualCPUCountRequirements(JobDefinitionType value, RangeValueType cpuCount) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualCPUCount = getOrCreateIndividualCPUCount(value);
+ setRangeValue(cpuCount, individualCPUCount);
+ }
+
+ public static void setIndividualCPUSpeedRequirements(JobDefinitionType value, RangeValueType cpuSpeed) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualCPUSpeed = getOrCreateIndividualCPUSpeed(value);
+ setRangeValue(cpuSpeed, individualCPUSpeed);
+ }
+
+ public static void setIndividualCPUTimeRequirements(JobDefinitionType value, RangeValueType cpuTime) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType cpuIndividualTime = getOrCreateIndividualCPUTime(value);
+ setRangeValue(cpuTime, cpuIndividualTime);
+ }
+
+ public static void setIndividualDiskSpaceRequirements(JobDefinitionType value, RangeValueType diskSpace) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualDiskSpace = getOrCreateIndividualDiskSpace(value);
+ setRangeValue(diskSpace, individualDiskSpace);
+ }
+
+ public static void setIndividualPhysicalMemoryRequirements(JobDefinitionType value, RangeValueType physicalMemory) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType individualPhysicalMemory = getOrCreateIndividualPhysicalMemory(value);
+ setRangeValue(physicalMemory, individualPhysicalMemory);
+ }
+
+
+ public static void setName(JobDefinitionType value, String name) {
+ getOrCreateJobIdentification(value).setJobName(name);
+ }
+
+ public static void setOperatingSystemRequirements(JobDefinitionType value, OSRequirement osType) {
+ if(osType == null || osType.getOSType() == null) return;
+ OperatingSystemType os_Type = getOrCreateOperatingSystem(value);
+ OperatingSystemTypeType ostt = os_Type.addNewOperatingSystemType();
+ ostt.setOperatingSystemName(OperatingSystemTypeEnumeration.Enum.forString(osType.getOSType().getValue()));
+ if(osType.getOSVersion() != null)
+ {
+ os_Type.setOperatingSystemVersion(osType.getOSVersion());
+ }
+ }
+
+ public static void setRangeValue(RangeValueType u6Type, org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType jsdlType) {
+ Double exact = u6Type.getExact();
+ Double epsilon = u6Type.getEpsilon();
+ Double lower = u6Type.getLowerBound();
+ Double upper = u6Type.getUpperBound();
+
+
+ if(lower.isNaN() && upper.isNaN())
+ {
+ ExactType exactType = jsdlType.getExactArray().length > 0 ? jsdlType.getExactArray(0) : jsdlType.addNewExact();
+ exactType.setDoubleValue(exact);
+ if(!epsilon.isNaN() && epsilon != 0)
+ {
+ exactType.setEpsilon(epsilon);
+ }
+ }
+ else
+ {
+ if(!lower.isNaN())
+ {
+ BoundaryType lowerBound = jsdlType.isSetLowerBoundedRange() ? jsdlType.getLowerBoundedRange() : jsdlType.addNewLowerBoundedRange();
+ lowerBound.setDoubleValue(lower);
+ lowerBound.setExclusiveBound(!u6Type.isIncludeLowerBound());
+ }
+
+ if(!upper.isNaN())
+ {
+ BoundaryType upperBound = jsdlType.isSetUpperBoundedRange() ? jsdlType.getUpperBoundedRange() : jsdlType.addNewUpperBoundedRange();
+ upperBound.setDoubleValue(upper);
+ upperBound.setExclusiveBound(!u6Type.isIncludeUpperBound());
+ }
+ }
+ }
+
+ public static void setTotalCPUCountRequirements(JobDefinitionType value, RangeValueType cpuCount) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType cpuTotalCount = getOrCreateTotalCPUCount(value);
+ setRangeValue(cpuCount, cpuTotalCount);
+ }
+
+ public static void setTotalResourceCountRequirements(JobDefinitionType value, RangeValueType resourceCount) {
+ org.ggf.schemas.jsdl.x2005.x11.jsdl.RangeValueType totalCount = getOrCreateTotalResourceCount(value);
+ setRangeValue(resourceCount, totalCount);
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/Mode.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/Mode.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/Mode.java
new file mode 100644
index 0000000..f47b75d
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/Mode.java
@@ -0,0 +1,45 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+/**
+ * file creation modes
+ */
/**
 * File creation modes used when staging files to a remote location.
 */
public enum Mode {

    /** overwrite any existing file */
    overwrite,

    /** append to an existing file */
    append,

    /** do NOT overwrite; fail if the file already exists */
    nooverwrite
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSRequirement.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSRequirement.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSRequirement.java
new file mode 100644
index 0000000..aa59de9
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSRequirement.java
@@ -0,0 +1,108 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.provider.utils.ResourceRequirement;
+
+public class OSRequirement implements ResourceRequirement {
+ private OSType osType;
+ private String version;
+ protected boolean enabled;
+
+
+ public OSRequirement() {
+ }
+
+ /**
+ *
+ * @param type -
+ * the type of the O/S
+ * @param version -
+ * the version of the O/S
+ */
+ public OSRequirement(OSType osType, String osVersion) {
+ setOSType(osType);
+ setOSVersion(osVersion);
+ }
+
+ /**
+ * Set the type of the O/S
+ *
+ * @param type -
+ * the type of the O/S
+ */
+ public void setOSType(OSType osType) {
+ this.osType = osType;
+ }
+
+ /**
+ * Get the type of the O/S
+ *
+ * @return the type of the O/S
+ */
+ public OSType getOSType() {
+ return osType;
+ }
+
+ /**
+ * Set the version of the O/S
+ *
+ * @param version -
+ * the version of the O/S
+ */
+ public void setOSVersion(String version) {
+ this.version = version;
+ }
+
+ /**
+ * Get the version of the O/S
+ *
+ * @return the version of the O/S
+ */
+ public String getOSVersion() {
+ return version;
+ }
+
+ /**
+ *
+ * equals this instance of class with another instance
+ */
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj==null || getClass() != obj.getClass()) return false;
+ final OSRequirement other = (OSRequirement) obj;
+ boolean typeEqual = osType == null ? other.osType == null : osType.equals(other.osType);
+ boolean versionEqual = version == null ? other.version == null : version.equals(other.version);
+ return typeEqual && versionEqual && isEnabled() == other.isEnabled();
+ }
+
+
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSType.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSType.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSType.java
new file mode 100644
index 0000000..b720f95
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/OSType.java
@@ -0,0 +1,124 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
/**
 * Operating-system identifiers as used in JSDL operating-system names.
 * Each constant carries the exact wire string returned by
 * {@link #getValue()} and {@link #toString()}.
 */
public enum OSType {

    unknown("Unknown"),
    linux("LINUX"),
    mac_os("MACOS"),
    win95("WIN95"),
    win98("WIN98"),
    windows_R_Me("Windows_R_Me"),
    winNT("WINNT"),
    windows_2000("Windows_2000"),
    windows_XP("Windows_XP"),
    msdos("MSDOS"),
    solaris("Solaris"),
    sunOS("SunOS"),
    freeBSD("FreeBSD"),
    netBSD("NetBSD"),
    openBSD("OpenBSD"),
    bsdunix("BSDUNIX"),
    aix("AIX"),
    z_OS("z_OS"),
    os_2("OS_2"),
    os9("OS9"),
    netWare("NetWare"),
    tru64_unix("Tru64_UNIX"),
    irix("IRIX"),
    osf("OSF"),
    mvs("MVS"),
    os400("OS400"),
    javaVM("JavaVM"),
    win3x("WIN3x"),
    winCE("WINCE"),
    NCR3000("NCR3000"),
    dc_os("DC_OS"),
    reliant_unix("Reliant_UNIX"),
    sco_unixWare("SCO_UnixWare"),
    sco_openServer("SCO_OpenServer"),
    sequent("Sequent"),
    u6000("U6000"),
    aseries("ASERIES"),
    tandemNSK("TandemNSK"),
    tandemNT("TandemNT"),
    bs2000("BS2000"),
    lynx("Lynx"),
    xenix("XENIX"),
    vm("VM"),
    interactive_unix("Interactive_UNIX"),
    gnu_hurd("GNU_Hurd"),
    mach_kernel("MACH_Kernel"),
    inferno("Inferno"),
    qnx("QNX"),
    epoc("EPOC"),
    ixWorks("IxWorks"),
    vxWorks("VxWorks"),
    mint("MiNT"),
    beOS("BeOS"),
    hp_mpe("HP_MPE"),
    nextStep("NextStep"),
    palmPilot("PalmPilot"),
    rhapsody("Rhapsody"),
    dedicated("Dedicated"),
    os_390("OS_390"),
    vse("VSE"),
    tpf("TPF"),
    caldera_open_unix("Caldera_Open_UNIX"),
    attunix("ATTUNIX"),
    dgux("DGUX"),
    decnt("DECNT"),
    openVMS("OpenVMS"),
    hpux("HPUX"),
    other("other");

    /** the exact wire string for this OS type */
    private final String value;

    private OSType(String value) {
        this.value = value;
    }

    /** @return the wire string associated with this constant */
    public String getValue() {
        return value;
    }

    /**
     * Looks up the constant whose wire string equals {@code value}.
     * NOTE: unmatched strings yield {@code null} (not {@link #unknown} or
     * {@link #other}); callers rely on this.
     */
    public static OSType fromString(String value) {
        for (OSType candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return value;
    }

}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ProcessorRequirement.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ProcessorRequirement.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ProcessorRequirement.java
new file mode 100644
index 0000000..82bf043
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ProcessorRequirement.java
@@ -0,0 +1,61 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+
+package org.apache.airavata.gfac.utils;
+
/**
 * Processor architectures that can be requested for a BES job, paired with
 * the string form used in JSDL documents.
 */
public enum ProcessorRequirement{
    sparc("sparc"), //$NON-NLS-1$
    powerpc("powerpc"), //$NON-NLS-1$
    x86("x86"), //$NON-NLS-1$
    x86_32("x86_32"), //$NON-NLS-1$
    x86_64("x86_64"), //$NON-NLS-1$
    parisc("parisc"), //$NON-NLS-1$
    mips("mips"), //$NON-NLS-1$
    ia64("ia64"), //$NON-NLS-1$
    arm("arm"), //$NON-NLS-1$
    other("other"); //$NON-NLS-1$

    /** JSDL string form of this architecture. */
    private final String value;

    ProcessorRequirement(String value) {
        this.value = value;
    }

    /** @return the JSDL string form of this constant */
    public String getValue() {
        return value;
    }

    /**
     * Resolves the constant whose string form equals {@code value}.
     * Any unrecognized string maps to {@link #other}.
     *
     * @param value the JSDL string form to look up
     * @return the matching constant, or {@link #other} when none matches
     */
    public static ProcessorRequirement fromString(String value) {
        for (ProcessorRequirement candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        return other;
    }

    @Override
    public String toString() {
        return value;
    }
}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/RangeValueType.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/RangeValueType.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/RangeValueType.java
new file mode 100644
index 0000000..114ea51
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/RangeValueType.java
@@ -0,0 +1,274 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.provider.utils.ResourceRequirement;
+
+public class RangeValueType implements ResourceRequirement {
+
+
+ private double exact = Double.NaN;
+ private double lowerBound = Double.NEGATIVE_INFINITY;
+ private double upperBound = Double.POSITIVE_INFINITY;
+
+ private double epsilon = Double.NaN;
+ private boolean includeLowerBound = true;
+ private boolean includeUpperBound = true;
+
+ private boolean enabled = false;
+
+
+ public RangeValueType(double exact, double epsilon, double lowerBound, boolean includeLowerBound, double upperBound, boolean includeUpperBound, boolean enabled) {
+ this.exact = exact;
+ this.epsilon = epsilon;
+ this.lowerBound = lowerBound;
+ this.includeLowerBound = includeLowerBound;
+ this.upperBound = upperBound;
+ this.includeUpperBound = includeUpperBound;
+ this.enabled = enabled;
+ }
+
+
+
+ /**
+ * Create the range requirements
+ *
+ * @param exact -
+ * the exact value
+ * @param lowerBound -
+ * the lower bound
+ * @param upperBound -
+ * the upper bound
+ * @param includelowerBound -
+ * true, if lowerBound should be included in range
+ * @param includeUpperBound -
+ * true, if upperBound should be included in range
+ *
+ */
+ public RangeValueType(double exact, double epsilon, double lowerBound, boolean includeLowerBound, double upperBound, boolean includeUpperBound) {
+ this(exact,epsilon,lowerBound,includeLowerBound,upperBound,includeUpperBound,false);
+
+ }
+
+
+ /**
+ * Create the range requirements
+ *
+ * @param exact -
+ * the exact value
+ * @param lowerBound -
+ * the lower bound
+ * @param upperBound -
+ * the upper bound
+ */
+ public RangeValueType(double exact, double epsilon, double lowerBound, double upperBound) {
+ this(exact,epsilon,lowerBound,true,upperBound,true);
+ }
+
+
+ public RangeValueType(double exact, double lowerBound, double upperBound) {
+ this(exact,Double.NaN,lowerBound,true,upperBound,true);
+ }
+
+ /**
+ * Create the exact requirements
+ *
+ * @param exact -
+ * the exact value
+ * @param epsilon -
+ * the epsilon arround exact
+ *
+ */
+ public RangeValueType(double exact, double epsilon) {
+ this(exact,epsilon,Double.NaN,Double.NaN);
+ }
+
+
+ /**
+ * Create the exact requirements
+ *
+ * @param exact -
+ * the exact value
+ */
+ public RangeValueType(double exact) {
+ this(exact,Double.NaN);
+ }
+
+ public RangeValueType() {
+ }
+
+ /**
+ * Get exact requirements
+ *
+ * @return the exact requirements
+ */
+ public double getExact() {
+ return exact;
+ }
+
+ /**
+ * Set exact requirements
+ *
+ * @param exact -
+ * the exact requirements
+ */
+ public void setExact(double exact) {
+ this.exact = exact;
+ }
+
+ /**
+ * Get epsilon
+ *
+ * @return the epsilon
+ */
+ public double getEpsilon() {
+ return epsilon;
+ }
+
+ /**
+ * Set epsilon
+ *
+ * @param epsilon -
+ * epsilon belonging to to exact requirements
+ */
+ public void setEpsilon(double epsilon) {
+ this.epsilon = epsilon;
+ }
+
+ /**
+ * Get lower bound
+ *
+ * @return the lower bound
+ */
+ public double getLowerBound() {
+ return lowerBound;
+ }
+
+ /**
+ * Set lower bound
+ *
+ * @param lowerBound -
+ * the lower bound
+ */
+ public void setLowerBound(double lowerBound) {
+ this.lowerBound = lowerBound;
+ }
+
+ /**
+ * Get upper bound
+ *
+ * @return the upper bound
+ */
+ public double getUpperBound() {
+ return upperBound;
+ }
+
+ /**
+ * Set upper bound
+ *
+ * @param upperBound -
+ * the upper bound
+ */
+ public void setUpperBound(double upperBound) {
+ this.upperBound = upperBound;
+ }
+
+ /**
+ * Test if requirements are met
+ *
+ * @param value -
+ * the tested value
+ * @return <code>true</code> if value is in the range and not less than
+ * the exact value
+ */
+ public boolean lowerThanDouble(double value) {
+ return (value >= exact && value >= lowerBound && value <= upperBound) ? true : false;
+ }
+
+ public String toString() {
+ if (lowerBound == Double.NEGATIVE_INFINITY && upperBound == Double.POSITIVE_INFINITY) {
+ return Double.toString(exact);
+ }
+ else {
+ return "(e=" + Double.toString(exact) + ",l=" + Double.toString(lowerBound) + ",u=" //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ + Double.toString(upperBound) + ")"; //$NON-NLS-1$
+ }
+ }
+
+
+ public boolean isIncludeLowerBound() {
+ return includeLowerBound;
+ }
+
+
+ public void setIncludeLowerBound(boolean includeLowerBound) {
+ this.includeLowerBound = includeLowerBound;
+ }
+
+
+ public boolean isIncludeUpperBound() {
+ return includeUpperBound;
+ }
+
+
+ public void setIncludeUpperBound(boolean includeUpperBound) {
+ this.includeUpperBound = includeUpperBound;
+ }
+
+ public RangeValueType clone(){
+ return new RangeValueType(this.exact, this.epsilon, this.lowerBound, this.includeLowerBound, this.upperBound, this.includeUpperBound,this.enabled);
+ }
+
+
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+
+ public boolean equals(Object o)
+ {
+ if(! (o instanceof RangeValueType)) return false;
+ RangeValueType other = (RangeValueType) o;
+ return doublesEqual(getExact(),other.getExact())
+ && doublesEqual(getEpsilon(), other.getEpsilon())
+ && doublesEqual(getLowerBound(), other.getLowerBound())
+ && doublesEqual(getUpperBound(), other.getUpperBound())
+ && isIncludeLowerBound() == other.isIncludeLowerBound()
+ && isIncludeUpperBound() == other.isIncludeUpperBound()
+ && isEnabled() == other.isEnabled();
+ }
+
+
+ private boolean doublesEqual(double a, double b)
+ {
+ Double A = new Double(a);
+ Double B = new Double(b);
+ return A.equals(B);
+ }
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ResourceProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ResourceProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ResourceProcessor.java
new file mode 100644
index 0000000..cc98f96
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/ResourceProcessor.java
@@ -0,0 +1,152 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.provider.GFacProviderException;
+import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.QueueType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
+
+public class ResourceProcessor {
+
+
+ public static void generateResourceElements(JobDefinitionType value, JobExecutionContext context) throws Exception{
+
+ HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+ .getApplicationContext().getApplicationDeploymentDescription()
+ .getType();
+
+ createMemory(value, appDepType);
+ TaskDetails taskData = context.getTaskData();
+ if(taskData != null && taskData.isSetTaskScheduling()){
+ ComputationalResourceScheduling computionResource= taskData.getTaskScheduling();
+ try {
+ int cpuCount = computionResource.getTotalCPUCount();
+ if(cpuCount>0){
+// appDepType.setCpuCount(cpuCount);
+ NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
+ String processers = Integer.toString(cpuCount);
+ num.setStringValue(processers);
+ JSDLUtils.getOrCreateSPMDApplication(value).setNumberOfProcesses(num);
+ }
+ } catch (NullPointerException e) {
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ int nodeCount = computionResource.getNodeCount();
+ if(nodeCount>0){
+ appDepType.setNodeCount(nodeCount);
+ }
+ } catch (NullPointerException e) {
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ String queueName = computionResource.getQueueName();
+ if (queueName != null) {
+ if(appDepType.getQueue() == null){
+ QueueType queueType = appDepType.addNewQueue();
+ queueType.setQueueName(queueName);
+ }else{
+ appDepType.getQueue().setQueueName(queueName);
+ }
+ }
+ } catch (NullPointerException e) {
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ try {
+ int maxwallTime = computionResource.getWallTimeLimit();
+ if(maxwallTime>0){
+ appDepType.setMaxWallTime(maxwallTime);
+ }
+ } catch (NullPointerException e) {
+ new GFacProviderException("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used",e);
+ }
+ }
+
+ if (appDepType.getCpuCount() > 0) {
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setLowerBound(Double.NaN);
+ rangeType.setUpperBound(Double.NaN);
+ rangeType.setExact(appDepType.getCpuCount());
+ JSDLUtils.setTotalCPUCountRequirements(value, rangeType);
+ }
+
+ if (appDepType.getProcessorsPerNode() > 0) {
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setLowerBound(Double.NaN);
+ rangeType.setUpperBound(Double.NaN);
+ rangeType.setExact(appDepType.getProcessorsPerNode());
+ JSDLUtils.setIndividualCPUCountRequirements(value, rangeType);
+ }
+
+ if (appDepType.getNodeCount() > 0) {
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setLowerBound(Double.NaN);
+ rangeType.setUpperBound(Double.NaN);
+ rangeType.setExact(appDepType.getNodeCount());
+ JSDLUtils.setTotalResourceCountRequirements(value, rangeType);
+ }
+
+ if(appDepType.getMaxWallTime() > 0) {
+ RangeValueType cpuTime = new RangeValueType();
+ cpuTime.setLowerBound(Double.NaN);
+ cpuTime.setUpperBound(Double.NaN);
+ long wallTime = appDepType.getMaxWallTime() * 60;
+ cpuTime.setExact(wallTime);
+ JSDLUtils.setIndividualCPUTimeRequirements(value, cpuTime);
+ }
+ }
+
+
+ private static void createMemory(JobDefinitionType value, HpcApplicationDeploymentType appDepType){
+ if (appDepType.getMinMemory() > 0 && appDepType.getMaxMemory() > 0) {
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setLowerBound(appDepType.getMinMemory());
+ rangeType.setUpperBound(appDepType.getMaxMemory());
+ JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
+ }
+
+ else if (appDepType.getMinMemory() > 0 && appDepType.getMaxMemory() <= 0) {
+ // TODO set Wall time
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setLowerBound(appDepType.getMinMemory());
+ JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
+ }
+
+ else if (appDepType.getMinMemory() <= 0 && appDepType.getMaxMemory() > 0) {
+ // TODO set Wall time
+ RangeValueType rangeType = new RangeValueType();
+ rangeType.setUpperBound(appDepType.getMinMemory());
+ JSDLUtils.setIndividualPhysicalMemoryRequirements(value, rangeType);
+ }
+
+ }
+
+
+
+
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDProcessor.java
new file mode 100644
index 0000000..170d60d
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDProcessor.java
@@ -0,0 +1,33 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+
+public class SPMDProcessor {
+
+ public static void generateSPMDElements(JobDefinitionType value, JobExecutionContext context) {
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDVariations.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDVariations.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDVariations.java
new file mode 100644
index 0000000..4b73fba
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/SPMDVariations.java
@@ -0,0 +1,52 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
/**
 * Known SPMD (single-program, multiple-data) variations, identified by their
 * JSDL SPMD extension URIs. {@link #value()} yields the full URI.
 */
public enum SPMDVariations {

    MPI("MPI"),
    GridMPI("GridMPI"),
    IntelMPI("IntelMPI"),
    LAMMPI("LAM-MPI"),
    MPICH1("MPICH1"),
    MPICH2("MPICH2"),
    MPICHGM("MPICH-GM"),
    MPICHMX("MPICH-MX"),
    MVAPICH("MVAPICH"),
    MVAPICH2("MVAPICH2"),
    OpenMPI("OpenMPI"),
    POE("POE"),
    PVM("PVM");

    // Common namespace prefix shared by every SPMD variation URI.
    // (Compile-time constant, so it is safe to reference from the enum
    // constructor.)
    private static final String BASE = "http://www.ogf.org/jsdl/2007/02/jsdl-spmd/";

    /** Full SPMD variation URI for this constant. */
    private final String variation;

    private SPMDVariations(String suffix) {
        this.variation = BASE + suffix;
    }

    /** @return the full SPMD variation URI */
    public String value(){
        return variation;
    }
}
+
+
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/StorageCreator.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/StorageCreator.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/StorageCreator.java
new file mode 100644
index 0000000..40729c6
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/StorageCreator.java
@@ -0,0 +1,211 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.util.Calendar;
+
+import javax.security.auth.x500.X500Principal;
+
+import org.oasisOpen.docs.wsrf.sg2.EntryType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.unigrids.services.atomic.types.PropertyType;
+import org.unigrids.x2006.x04.services.smf.CreateSMSDocument;
+import org.unigrids.x2006.x04.services.smf.StorageBackendParametersDocument.StorageBackendParameters;
+import org.unigrids.x2006.x04.services.smf.StorageDescriptionType;
+import org.w3.x2005.x08.addressing.EndpointReferenceType;
+
+import de.fzj.unicore.uas.StorageFactory;
+import de.fzj.unicore.uas.client.StorageClient;
+import de.fzj.unicore.uas.client.StorageFactoryClient;
+import de.fzj.unicore.wsrflite.xmlbeans.WSUtilities;
+import de.fzj.unicore.wsrflite.xmlbeans.client.RegistryClient;
+import de.fzj.unicore.wsrflite.xmlbeans.sg.Registry;
+
+
+import eu.unicore.util.httpclient.DefaultClientConfiguration;
+
+public class StorageCreator {
+ protected final Logger log = LoggerFactory.getLogger(this.getClass());
+
+ /**
+ * the initial lifetime (in days) for newly created SMSs
+ */
+ private int initialLifeTime;
+
+ /**
+ * factory URL to use
+ */
+ private String factoryUrl;
+
+ /**
+ * site where to create the storage
+ */
+ private String siteName;
+
+ /**
+ * storage type to create
+ */
+ private String storageType;
+
+ private DefaultClientConfiguration secProps;
+
+ private String userName;
+
+ public StorageCreator(DefaultClientConfiguration secProps, String besUrl, int initialLifetime, String storageType, String userName) {
+ this.secProps = secProps;
+ this.factoryUrl = getStorageFactoryUrl(besUrl);
+ this.storageType = storageType;
+ this.initialLifeTime = initialLifetime;
+ this.userName = userName;
+ }
+
+
+ public StorageCreator(DefaultClientConfiguration secProps, String besUrl, int initialLifetime, String userName) {
+ this.secProps = secProps;
+ this.factoryUrl = getStorageFactoryUrl(besUrl);
+ this.initialLifeTime = initialLifetime;
+ this.userName = userName;
+ }
+
+
+ // The target site must have storage factory deployed with bes factory
+ public StorageClient createStorage() throws Exception{
+
+ if(factoryUrl == null) {
+ throw new Exception("Cannot create Storage Factory Url");
+ }
+
+ EndpointReferenceType sfEpr= WSUtilities.makeServiceEPR(factoryUrl, StorageFactory.SMF_PORT);
+
+ String dn = findServerName(factoryUrl, sfEpr);
+
+ WSUtilities.addServerIdentity(sfEpr, dn);
+
+ secProps.getETDSettings().setReceiver(new X500Principal(dn));
+ secProps.getETDSettings().setIssuerCertificateChain(secProps.getCredential().getCertificateChain());
+
+ // TODO: remove it afterwards
+ if(userName != null) {
+ secProps.getETDSettings().getRequestedUserAttributes2().put("xlogin", new String[]{userName});
+ }
+
+ StorageFactoryClient sfc = new StorageFactoryClient(sfEpr, secProps);
+
+ if (log.isDebugEnabled()){
+ log.debug("Using storage factory at <"+sfc.getUrl()+">");
+ }
+
+ StorageClient sc = null;
+ try{
+ sc=sfc.createSMS(getCreateSMSDocument());
+
+ String addr=sc.getEPR().getAddress().getStringValue();
+ log.info(addr);
+
+ }catch(Exception ex){
+ log.error("Could not create storage",ex);
+ throw new Exception(ex);
+ }
+
+ return sc;
+ }
+
+ protected String findServerName(String besUrl, EndpointReferenceType smsEpr)throws Exception{
+
+ int besIndex = besUrl.indexOf("StorageFactory?res");
+ String ss = besUrl.substring(0, besIndex);
+ ss = ss + "Registry";
+
+ EndpointReferenceType eprt = WSUtilities.makeServiceEPR(ss, "default_registry", Registry.REGISTRY_PORT);
+
+ RegistryClient registry = new RegistryClient(eprt, secProps);
+
+ //first, check if server name is already in the EPR...
+ String dn=WSUtilities.extractServerIDFromEPR(smsEpr);
+ if(dn!=null){
+ return dn;
+ }
+ //otherwise find a matching service in the registry
+ String url=smsEpr.getAddress().getStringValue();
+ if(url.contains("/services/"))url=url.substring(0,url.indexOf("/services"));
+ if(log.isDebugEnabled()) log.debug("Checking for services at "+url);
+ for(EntryType entry:registry.listEntries()){
+ if(entry.getMemberServiceEPR().getAddress().getStringValue().startsWith(url)){
+ dn=WSUtilities.extractServerIDFromEPR(entry.getMemberServiceEPR());
+ if(dn!=null){
+ return dn;
+ }
+ }
+ }
+ return null;
+ }
+
+
+ public static String getStorageFactoryUrl(String besUrl){
+ int besIndex = besUrl.indexOf("BESFactory?res");
+ String ss = besUrl.substring(0, besIndex);
+ ss = ss + "StorageFactory?res=default_storage_factory";
+ return ss;
+ }
+
+ /**
+ * prepare request
+ * */
+ protected CreateSMSDocument getCreateSMSDocument(String ...keyValueParams){
+ CreateSMSDocument in=CreateSMSDocument.Factory.newInstance();
+ in.addNewCreateSMS();
+ if(initialLifeTime>0){
+ in.getCreateSMS().addNewTerminationTime().setCalendarValue(getTermTime());
+ }
+ if(storageType!=null){
+ if(log.isDebugEnabled()) {
+ log.debug("Will create storage of type : "+storageType);
+ }
+ StorageDescriptionType desc=in.getCreateSMS().addNewStorageDescription();
+ desc.setStorageBackendType(storageType);
+ if(keyValueParams.length>1){
+ //other parameters from the cmdline as key=value
+ StorageBackendParameters params=desc.addNewStorageBackendParameters();
+ for(int i=1;i<keyValueParams.length;i++){
+ String arg=keyValueParams[i];
+ String[]sp=arg.split("=",2);
+ PropertyType prop=params.addNewProperty();
+ prop.setName(sp[0]);
+ prop.setValue(sp[1]);
+ if(log.isDebugEnabled()) {
+ log.debug("Have parameter : "+arg);
+ }
+ }
+ }
+ }
+ return in;
+ }
+
+ protected Calendar getTermTime(){
+ Calendar c = Calendar.getInstance();
+ c.add(Calendar.DATE, initialLifeTime);
+ return c;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/airavata/blob/13b505ae/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/UASDataStagingProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/UASDataStagingProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/UASDataStagingProcessor.java
new file mode 100644
index 0000000..461ee0b
--- /dev/null
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/utils/UASDataStagingProcessor.java
@@ -0,0 +1,225 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+package org.apache.airavata.gfac.utils;
+
+import java.io.File;
+import java.util.Map;
+
+import org.apache.airavata.commons.gfac.type.ActualParameter;
+import org.apache.airavata.gfac.context.JobExecutionContext;
+import org.apache.airavata.gfac.context.MessageContext;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
+import org.apache.airavata.schemas.gfac.UnicoreHostType;
+import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
+
+public class UASDataStagingProcessor {
+
+ public static void generateDataStagingElements(JobDefinitionType value, JobExecutionContext context, String smsUrl) throws Exception{
+
+ HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
+ .getApplicationContext().getApplicationDeploymentDescription()
+ .getType();
+
+ smsUrl = "BFT:"+smsUrl;
+
+ if (context.getInMessageContext().getParameters().size() > 0) {
+ buildDataStagingFromInputContext(context, value, smsUrl, appDepType);
+ }
+ MessageContext outMessage = new MessageContext();
+ ActualParameter a1 = new ActualParameter();
+ a1.getType().changeType(StringParameterType.type);
+ ((StringParameterType)a1.getType()).setValue("output/analysis-results.tar");
+ outMessage.addParameter("o1", a1);
+ context.setOutMessageContext(outMessage);
+
+ if (context.getOutMessageContext().getParameters().size() > 0) {
+ buildFromOutputContext(context, value, smsUrl, appDepType);
+ }
+ createStdOutURIs(value, appDepType, smsUrl, isUnicoreEndpoint(context));
+ }
+
+ private static void createInURISMSElement(JobDefinitionType value,
+ String smsUrl, String inputDir, ActualParameter inParam)
+ throws Exception {
+
+ String uri = ((URIParameterType) inParam.getType()).getValue();
+ //TODO: To add this input file name setting part of Airavata API
+ String fileName = "input/" + new File(uri).getName();
+ if (uri.startsWith("file")) {
+ String fileUri = smsUrl+"#/"+fileName;
+
+ JSDLUtils.addDataStagingSourceElement(value, fileUri, null, fileName);
+ } else if (uri.startsWith("gsiftp") || uri.startsWith("http")
+ || uri.startsWith("rns")) {
+ // no need to stage-in those files to the input
+ // directory because unicore site will fetch them for the user
+ JSDLUtils.addDataStagingSourceElement(value, uri, null, fileName);
+ }
+
+ }
+
+ private static void createStdOutURIs(JobDefinitionType value,
+ HpcApplicationDeploymentType appDepType, String smsUrl,
+ boolean isUnicore) throws Exception {
+
+
+ String stdout = ApplicationProcessor.getApplicationStdOut(value, appDepType);
+
+ String stderr = ApplicationProcessor.getApplicationStdErr(value, appDepType);
+
+ String stdoutFileName = (stdout == null || stdout.equals("")) ? "stdout"
+ : stdout;
+ String stdoutURI = smsUrl+"#/output/"+stdoutFileName;
+ JSDLUtils.addDataStagingTargetElement(value, null, stdoutFileName,
+ stdoutURI);
+
+ String stderrFileName = (stdout == null || stderr.equals("")) ? "stderr"
+ : stderr;
+ String stderrURI = smsUrl+"#/output/"+stderrFileName;
+ JSDLUtils.addDataStagingTargetElement(value, null, stderrFileName,
+ stderrURI);
+
+ if(isUnicore) {
+ String scriptExitCodeFName = "UNICORE_SCRIPT_EXIT_CODE";
+ String scriptExitCode = smsUrl+"#/output/"+scriptExitCodeFName;
+ JSDLUtils.addDataStagingTargetElement(value, null,
+ scriptExitCodeFName, scriptExitCode.toString());
+ }
+
+ }
+
+
+ private static void createOutStringElements(JobDefinitionType value,
+ HpcApplicationDeploymentType appDeptype, String smsUrl, String prmValue) throws Exception {
+
+ if(prmValue == null || "".equals(prmValue)) return;
+
+ String finalSMSPath = smsUrl + "#/output/"+prmValue;
+
+ JSDLUtils.addDataStagingTargetElement(value, null, prmValue, finalSMSPath);
+ }
+
+
+ private static void createOutURIElement(JobDefinitionType value,
+ String prmValue) throws Exception {
+ String fileName = new File(prmValue.toString()).getName();
+ JSDLUtils.addDataStagingTargetElement(value, null, fileName, prmValue);
+ }
+
+
+ private static JobDefinitionType buildFromOutputContext(JobExecutionContext context,
+ JobDefinitionType value, String smsUrl,
+ HpcApplicationDeploymentType appDepType) throws Exception {
+
+ Map<String, Object> outputParams = context.getOutMessageContext()
+ .getParameters();
+
+ for (String paramKey : outputParams.keySet()) {
+
+ ActualParameter outParam = (ActualParameter) outputParams
+ .get(paramKey);
+
+ // if single urls then convert each url into jsdl source
+ // elements,
+ // that are formed by concat of gridftpurl+inputdir+filename
+
+ String paramDataType = outParam.getType().getType().toString();
+
+ if ("URI".equals(paramDataType)) {
+ String uriPrm = ((URIParameterType) outParam.getType())
+ .getValue();
+ createOutURIElement(value, uriPrm);
+ }
+
+ // string params are converted into the job arguments
+
+ else if (("URIArray").equals(paramDataType)) {
+ String[] uriArray = ((URIArrayType) outParam.getType())
+ .getValueArray();
+ for (String u : uriArray) {
+
+ createOutURIElement(value, u);
+ }
+
+ }
+ else if ("String".equals(paramDataType)) {
+ String stringPrm = ((StringParameterType) outParam
+ .getType()).getValue();
+ createOutStringElements(value, appDepType, smsUrl, stringPrm);
+ }
+
+ else if ("StringArray".equals(paramDataType)) {
+ String[] valueArray = ((StringArrayType) outParam.getType())
+ .getValueArray();
+ for (String v : valueArray) {
+ createOutStringElements(value, appDepType, smsUrl, v);
+ }
+ }
+ }
+
+ return value;
+ }
+
+
+ private static void buildDataStagingFromInputContext(JobExecutionContext context, JobDefinitionType value, String smsUrl, HpcApplicationDeploymentType appDepType)
+ throws Exception {
+
+ // TODO set data directory
+ Map<String, Object> inputParams = context.getInMessageContext()
+ .getParameters();
+
+ for (String paramKey : inputParams.keySet()) {
+
+ ActualParameter inParam = (ActualParameter) inputParams
+ .get(paramKey);
+
+ // if single urls then convert each url into jsdl source
+ // elements,
+ // that are formed by concat of gridftpurl+inputdir+filename
+
+ String paramDataType = inParam.getType().getType().toString();
+
+ if ("URI".equals(paramDataType)) {
+ createInURISMSElement(value, smsUrl,
+ appDepType.getInputDataDirectory(), inParam);
+ }
+
+ // string params are converted into the job arguments
+
+ else if ("String".equals(paramDataType)) {
+ String stringPrm = ((StringParameterType) inParam.getType())
+ .getValue();
+ ApplicationProcessor.addApplicationArgument(value, appDepType, stringPrm);
+ }
+ }
+
+ }
+
+ public static boolean isUnicoreEndpoint(JobExecutionContext context) {
+ return ( (context.getApplicationContext().getHostDescription().getType() instanceof UnicoreHostType)?true:false );
+ }
+
+}