Posted to commits@airavata.apache.org by sh...@apache.org on 2016/11/08 17:57:55 UTC

[2/7] airavata git commit: Replaced cloud job submission command with generated script

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/CloudJobSubmission.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/CloudJobSubmission.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/CloudJobSubmission.java
index 7d540de..23469ac 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/CloudJobSubmission.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/CloudJobSubmission.java
@@ -62,10 +62,11 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
 
   private static final org.apache.thrift.protocol.TField JOB_SUBMISSION_INTERFACE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("jobSubmissionInterfaceId", org.apache.thrift.protocol.TType.STRING, (short)1);
   private static final org.apache.thrift.protocol.TField SECURITY_PROTOCOL_FIELD_DESC = new org.apache.thrift.protocol.TField("securityProtocol", org.apache.thrift.protocol.TType.I32, (short)2);
-  private static final org.apache.thrift.protocol.TField NODE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("nodeId", org.apache.thrift.protocol.TType.STRING, (short)3);
-  private static final org.apache.thrift.protocol.TField EXECUTABLE_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("executableType", org.apache.thrift.protocol.TType.STRING, (short)4);
-  private static final org.apache.thrift.protocol.TField PROVIDER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("providerName", org.apache.thrift.protocol.TType.I32, (short)5);
-  private static final org.apache.thrift.protocol.TField USER_ACCOUNT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("userAccountName", org.apache.thrift.protocol.TType.STRING, (short)6);
+  private static final org.apache.thrift.protocol.TField JOB_MANAGER_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("jobManagerType", org.apache.thrift.protocol.TType.I32, (short)3);
+  private static final org.apache.thrift.protocol.TField NODE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("nodeId", org.apache.thrift.protocol.TType.STRING, (short)4);
+  private static final org.apache.thrift.protocol.TField EXECUTABLE_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("executableType", org.apache.thrift.protocol.TType.STRING, (short)5);
+  private static final org.apache.thrift.protocol.TField PROVIDER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("providerName", org.apache.thrift.protocol.TType.I32, (short)6);
+  private static final org.apache.thrift.protocol.TField USER_ACCOUNT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("userAccountName", org.apache.thrift.protocol.TType.STRING, (short)7);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -75,6 +76,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
 
   private String jobSubmissionInterfaceId; // required
   private org.apache.airavata.model.data.movement.SecurityProtocol securityProtocol; // required
+  private ResourceJobManagerType jobManagerType; // required
   private String nodeId; // required
   private String executableType; // required
   private ProviderName providerName; // required
@@ -88,14 +90,19 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
      * @see org.apache.airavata.model.data.movement.SecurityProtocol
      */
     SECURITY_PROTOCOL((short)2, "securityProtocol"),
-    NODE_ID((short)3, "nodeId"),
-    EXECUTABLE_TYPE((short)4, "executableType"),
+    /**
+     * 
+     * @see ResourceJobManagerType
+     */
+    JOB_MANAGER_TYPE((short)3, "jobManagerType"),
+    NODE_ID((short)4, "nodeId"),
+    EXECUTABLE_TYPE((short)5, "executableType"),
     /**
      * 
      * @see ProviderName
      */
-    PROVIDER_NAME((short)5, "providerName"),
-    USER_ACCOUNT_NAME((short)6, "userAccountName");
+    PROVIDER_NAME((short)6, "providerName"),
+    USER_ACCOUNT_NAME((short)7, "userAccountName");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -114,13 +121,15 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
           return JOB_SUBMISSION_INTERFACE_ID;
         case 2: // SECURITY_PROTOCOL
           return SECURITY_PROTOCOL;
-        case 3: // NODE_ID
+        case 3: // JOB_MANAGER_TYPE
+          return JOB_MANAGER_TYPE;
+        case 4: // NODE_ID
           return NODE_ID;
-        case 4: // EXECUTABLE_TYPE
+        case 5: // EXECUTABLE_TYPE
           return EXECUTABLE_TYPE;
-        case 5: // PROVIDER_NAME
+        case 6: // PROVIDER_NAME
           return PROVIDER_NAME;
-        case 6: // USER_ACCOUNT_NAME
+        case 7: // USER_ACCOUNT_NAME
           return USER_ACCOUNT_NAME;
         default:
           return null;
@@ -169,6 +178,8 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.SECURITY_PROTOCOL, new org.apache.thrift.meta_data.FieldMetaData("securityProtocol", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, org.apache.airavata.model.data.movement.SecurityProtocol.class)));
+    tmpMap.put(_Fields.JOB_MANAGER_TYPE, new org.apache.thrift.meta_data.FieldMetaData("jobManagerType", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, ResourceJobManagerType.class)));
     tmpMap.put(_Fields.NODE_ID, new org.apache.thrift.meta_data.FieldMetaData("nodeId", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.EXECUTABLE_TYPE, new org.apache.thrift.meta_data.FieldMetaData("executableType", org.apache.thrift.TFieldRequirementType.REQUIRED, 
@@ -189,6 +200,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
   public CloudJobSubmission(
     String jobSubmissionInterfaceId,
     org.apache.airavata.model.data.movement.SecurityProtocol securityProtocol,
+    ResourceJobManagerType jobManagerType,
     String nodeId,
     String executableType,
     ProviderName providerName,
@@ -197,6 +209,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     this();
     this.jobSubmissionInterfaceId = jobSubmissionInterfaceId;
     this.securityProtocol = securityProtocol;
+    this.jobManagerType = jobManagerType;
     this.nodeId = nodeId;
     this.executableType = executableType;
     this.providerName = providerName;
@@ -213,6 +226,9 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     if (other.isSetSecurityProtocol()) {
       this.securityProtocol = other.securityProtocol;
     }
+    if (other.isSetJobManagerType()) {
+      this.jobManagerType = other.jobManagerType;
+    }
     if (other.isSetNodeId()) {
       this.nodeId = other.nodeId;
     }
@@ -236,6 +252,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     this.jobSubmissionInterfaceId = "DO_NOT_SET_AT_CLIENTS";
 
     this.securityProtocol = null;
+    this.jobManagerType = null;
     this.nodeId = null;
     this.executableType = null;
     this.providerName = null;
@@ -296,6 +313,37 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     }
   }
 
+  /**
+   * 
+   * @see ResourceJobManagerType
+   */
+  public ResourceJobManagerType getJobManagerType() {
+    return this.jobManagerType;
+  }
+
+  /**
+   * 
+   * @see ResourceJobManagerType
+   */
+  public void setJobManagerType(ResourceJobManagerType jobManagerType) {
+    this.jobManagerType = jobManagerType;
+  }
+
+  public void unsetJobManagerType() {
+    this.jobManagerType = null;
+  }
+
+  /** Returns true if field jobManagerType is set (has been assigned a value) and false otherwise */
+  public boolean isSetJobManagerType() {
+    return this.jobManagerType != null;
+  }
+
+  public void setJobManagerTypeIsSet(boolean value) {
+    if (!value) {
+      this.jobManagerType = null;
+    }
+  }
+
   public String getNodeId() {
     return this.nodeId;
   }
@@ -414,6 +462,14 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
       }
       break;
 
+    case JOB_MANAGER_TYPE:
+      if (value == null) {
+        unsetJobManagerType();
+      } else {
+        setJobManagerType((ResourceJobManagerType)value);
+      }
+      break;
+
     case NODE_ID:
       if (value == null) {
         unsetNodeId();
@@ -457,6 +513,9 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     case SECURITY_PROTOCOL:
       return getSecurityProtocol();
 
+    case JOB_MANAGER_TYPE:
+      return getJobManagerType();
+
     case NODE_ID:
       return getNodeId();
 
@@ -484,6 +543,8 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
       return isSetJobSubmissionInterfaceId();
     case SECURITY_PROTOCOL:
       return isSetSecurityProtocol();
+    case JOB_MANAGER_TYPE:
+      return isSetJobManagerType();
     case NODE_ID:
       return isSetNodeId();
     case EXECUTABLE_TYPE:
@@ -527,6 +588,15 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
         return false;
     }
 
+    boolean this_present_jobManagerType = true && this.isSetJobManagerType();
+    boolean that_present_jobManagerType = true && that.isSetJobManagerType();
+    if (this_present_jobManagerType || that_present_jobManagerType) {
+      if (!(this_present_jobManagerType && that_present_jobManagerType))
+        return false;
+      if (!this.jobManagerType.equals(that.jobManagerType))
+        return false;
+    }
+
     boolean this_present_nodeId = true && this.isSetNodeId();
     boolean that_present_nodeId = true && that.isSetNodeId();
     if (this_present_nodeId || that_present_nodeId) {
@@ -580,6 +650,11 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     if (present_securityProtocol)
       list.add(securityProtocol.getValue());
 
+    boolean present_jobManagerType = true && (isSetJobManagerType());
+    list.add(present_jobManagerType);
+    if (present_jobManagerType)
+      list.add(jobManagerType.getValue());
+
     boolean present_nodeId = true && (isSetNodeId());
     list.add(present_nodeId);
     if (present_nodeId)
@@ -631,6 +706,16 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
         return lastComparison;
       }
     }
+    lastComparison = Boolean.valueOf(isSetJobManagerType()).compareTo(other.isSetJobManagerType());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetJobManagerType()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.jobManagerType, other.jobManagerType);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     lastComparison = Boolean.valueOf(isSetNodeId()).compareTo(other.isSetNodeId());
     if (lastComparison != 0) {
       return lastComparison;
@@ -707,6 +792,14 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
     }
     first = false;
     if (!first) sb.append(", ");
+    sb.append("jobManagerType:");
+    if (this.jobManagerType == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.jobManagerType);
+    }
+    first = false;
+    if (!first) sb.append(", ");
     sb.append("nodeId:");
     if (this.nodeId == null) {
       sb.append("null");
@@ -752,6 +845,10 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
       throw new org.apache.thrift.protocol.TProtocolException("Required field 'securityProtocol' is unset! Struct:" + toString());
     }
 
+    if (!isSetJobManagerType()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'jobManagerType' is unset! Struct:" + toString());
+    }
+
     if (!isSetNodeId()) {
       throw new org.apache.thrift.protocol.TProtocolException("Required field 'nodeId' is unset! Struct:" + toString());
     }
@@ -821,7 +918,15 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 3: // NODE_ID
+          case 3: // JOB_MANAGER_TYPE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.jobManagerType = org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType.findByValue(iprot.readI32());
+              struct.setJobManagerTypeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // NODE_ID
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.nodeId = iprot.readString();
               struct.setNodeIdIsSet(true);
@@ -829,7 +934,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 4: // EXECUTABLE_TYPE
+          case 5: // EXECUTABLE_TYPE
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.executableType = iprot.readString();
               struct.setExecutableTypeIsSet(true);
@@ -837,7 +942,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 5: // PROVIDER_NAME
+          case 6: // PROVIDER_NAME
             if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
               struct.providerName = org.apache.airavata.model.appcatalog.computeresource.ProviderName.findByValue(iprot.readI32());
               struct.setProviderNameIsSet(true);
@@ -845,7 +950,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 6: // USER_ACCOUNT_NAME
+          case 7: // USER_ACCOUNT_NAME
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.userAccountName = iprot.readString();
               struct.setUserAccountNameIsSet(true);
@@ -876,6 +981,11 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
         oprot.writeI32(struct.securityProtocol.getValue());
         oprot.writeFieldEnd();
       }
+      if (struct.jobManagerType != null) {
+        oprot.writeFieldBegin(JOB_MANAGER_TYPE_FIELD_DESC);
+        oprot.writeI32(struct.jobManagerType.getValue());
+        oprot.writeFieldEnd();
+      }
       if (struct.nodeId != null) {
         oprot.writeFieldBegin(NODE_ID_FIELD_DESC);
         oprot.writeString(struct.nodeId);
@@ -915,6 +1025,7 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
       TTupleProtocol oprot = (TTupleProtocol) prot;
       oprot.writeString(struct.jobSubmissionInterfaceId);
       oprot.writeI32(struct.securityProtocol.getValue());
+      oprot.writeI32(struct.jobManagerType.getValue());
       oprot.writeString(struct.nodeId);
       oprot.writeString(struct.executableType);
       oprot.writeI32(struct.providerName.getValue());
@@ -928,6 +1039,8 @@ public class CloudJobSubmission implements org.apache.thrift.TBase<CloudJobSubmi
       struct.setJobSubmissionInterfaceIdIsSet(true);
       struct.securityProtocol = org.apache.airavata.model.data.movement.SecurityProtocol.findByValue(iprot.readI32());
       struct.setSecurityProtocolIsSet(true);
+      struct.jobManagerType = org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType.findByValue(iprot.readI32());
+      struct.setJobManagerTypeIsSet(true);
       struct.nodeId = iprot.readString();
       struct.setNodeIdIsSet(true);
       struct.executableType = iprot.readString();
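
For reference, a minimal sketch of how client code could populate the regenerated struct; all values below are hypothetical, and the SecurityProtocol and ProviderName constants shown are assumptions. The point is that jobManagerType is now a required constructor argument, so existing callers have to be updated:

    // Hypothetical values for illustration only.
    CloudJobSubmission submission = new CloudJobSubmission(
            "DO_NOT_SET_AT_CLIENTS",                                   // jobSubmissionInterfaceId
            org.apache.airavata.model.data.movement.SecurityProtocol.SSH_KEYS,
            ResourceJobManagerType.CLOUD,                              // new required field
            "node-01",                                                 // nodeId
            "SH",                                                      // executableType
            ProviderName.EC2,                                          // assumed provider constant
            "cloud-user");                                             // userAccountName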

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManagerType.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManagerType.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManagerType.java
index 2fc5c0b..2bc79c5 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManagerType.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManagerType.java
@@ -52,7 +52,8 @@ public enum ResourceJobManagerType implements org.apache.thrift.TEnum {
   PBS(1),
   SLURM(2),
   LSF(3),
-  UGE(4);
+  UGE(4),
+  CLOUD(5);
 
   private final int value;
 
@@ -83,6 +84,8 @@ public enum ResourceJobManagerType implements org.apache.thrift.TEnum {
         return LSF;
       case 4:
         return UGE;
+      case 5:
+        return CLOUD;
       default:
         return null;
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/configuration/server/src/main/resources/CLOUD_Groovy.template
----------------------------------------------------------------------
diff --git a/modules/configuration/server/src/main/resources/CLOUD_Groovy.template b/modules/configuration/server/src/main/resources/CLOUD_Groovy.template
new file mode 100644
index 0000000..b16beb7
--- /dev/null
+++ b/modules/configuration/server/src/main/resources/CLOUD_Groovy.template
@@ -0,0 +1,7 @@
+<%
+   if (workingDirectory != null)  out.print 'cd ' + workingDirectory +' && '
+   if (jobSubmitterCommand != null)  out.print jobSubmitterCommand + ' '
+   if (executablePath != null)  out.print  executablePath + ' '
+   if (inputs != null) for(input in inputs)  out.print input + ' '
+   out.print '\n'
+%>
\ No newline at end of file
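
The template above is rendered with Groovy's GStringTemplateEngine (see the generateScript change in GFacUtils.java below), so the generated cloud command can be previewed in isolation. A minimal sketch, with hypothetical binding values standing in for what GFacUtils.createGroovyMap would supply:

    import groovy.lang.Writable;
    import groovy.text.GStringTemplateEngine;

    import java.io.File;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class CloudTemplatePreview {
        public static void main(String[] args) throws Exception {
            // Hypothetical bindings; in GFac these come from the GroovyMap.
            Map<String, Object> binding = new HashMap<>();
            binding.put("workingDirectory", "/home/centos/job-dir");
            binding.put("jobSubmitterCommand", "sh");
            binding.put("executablePath", "/usr/local/bin/vina.sh");
            binding.put("inputs", Arrays.asList("input.pdbqt", "config.txt"));

            Writable script = new GStringTemplateEngine()
                    .createTemplate(new File("CLOUD_Groovy.template"))
                    .make(binding);
            // Expected output: cd /home/centos/job-dir && sh /usr/local/bin/vina.sh input.pdbqt config.txt
            System.out.println(script.toString());
        }
    }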

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
index 66998c3..4de01ab 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GFacUtils.java
@@ -480,7 +480,7 @@ public class GFacUtils {
     public static String getZKGfacServersParentPath() {
         return ZKPaths.makePath(ZkConstants.ZOOKEEPER_SERVERS_NODE, ZkConstants.ZOOKEEPER_GFAC_SERVER_NODE);
     }
-    public static GroovyMap crateGroovyMap(ProcessContext processContext)
+    public static GroovyMap createGroovyMap(ProcessContext processContext)
             throws ApplicationSettingsException, AppCatalogException, GFacException {
         return createGroovyMap(processContext, null);
     }
@@ -488,140 +488,145 @@ public class GFacUtils {
             throws GFacException, AppCatalogException, ApplicationSettingsException {
 
         GroovyMap groovyMap = new GroovyMap();
-        ProcessModel processModel = processContext.getProcessModel();
-        ResourceJobManager resourceJobManager = getResourceJobManager(processContext);
-        setMailAddresses(processContext, groovyMap); // set email options and addresses
-
-        groovyMap.add(Script.INPUT_DIR, processContext.getInputDir());
-        groovyMap.add(Script.OUTPUT_DIR, processContext.getOutputDir());
-        groovyMap.add(Script.EXECUTABLE_PATH, processContext.getApplicationDeploymentDescription().getExecutablePath());
-        groovyMap.add(Script.STANDARD_OUT_FILE, processContext.getStdoutLocation());
-        groovyMap.add(Script.STANDARD_ERROR_FILE, processContext.getStderrLocation());
-        groovyMap.add(Script.SCRATCH_LOCATION, processContext.getScratchLocation());
-        groovyMap.add(Script.GATEWAY_ID, processContext.getGatewayId());
-        groovyMap.add(Script.GATEWAY_USER_NAME, processContext.getProcessModel().getUserName());
-        groovyMap.add(Script.APPLICATION_NAME, processContext.getApplicationInterfaceDescription().getApplicationName());
-
-        ComputeResourcePreference crp = getComputeResourcePreference(processContext);
-        if (isValid(crp.getAllocationProjectNumber())) {
-            groovyMap.add(Script.ACCOUNT_STRING, crp.getAllocationProjectNumber());
-        }
-        groovyMap.add(Script.RESERVATION, getReservation(crp));
-
-        // To make job name alpha numeric
-        groovyMap.add(Script.JOB_NAME, "A" + String.valueOf(generateJobName()));
-        groovyMap.add(Script.WORKING_DIR, processContext.getWorkingDir());
-
-        List<String> inputValues = getProcessInputValues(processModel.getProcessInputs());
-        inputValues.addAll(getProcessOutputValues(processModel.getProcessOutputs()));
-        groovyMap.add(Script.INPUTS, inputValues);
-
-        groovyMap.add(Script.USER_NAME, processContext.getJobSubmissionRemoteCluster().getServerInfo().getUserName());
-        groovyMap.add(Script.SHELL_NAME, "/bin/bash");
-        // get walltime
-        if (taskContext != null) {
-            try {
-                JobSubmissionTaskModel jobSubmissionTaskModel = ((JobSubmissionTaskModel) taskContext.getSubTaskModel());
-                if (jobSubmissionTaskModel.getWallTime() > 0) {
-                    groovyMap.add(Script.MAX_WALL_TIME,
-                            GFacUtils.maxWallTimeCalculator(jobSubmissionTaskModel.getWallTime()));
-                }
-            } catch (TException e) {
-                log.error("Error while getting job submission sub task model", e);
+        try {
+            ProcessModel processModel = processContext.getProcessModel();
+            ResourceJobManager resourceJobManager = getResourceJobManager(processContext);
+            setMailAddresses(processContext, groovyMap); // set email options and addresses
+
+            groovyMap.add(Script.INPUT_DIR, processContext.getInputDir());
+            groovyMap.add(Script.OUTPUT_DIR, processContext.getOutputDir());
+            groovyMap.add(Script.EXECUTABLE_PATH, processContext.getApplicationDeploymentDescription().getExecutablePath());
+            groovyMap.add(Script.STANDARD_OUT_FILE, processContext.getStdoutLocation());
+            groovyMap.add(Script.STANDARD_ERROR_FILE, processContext.getStderrLocation());
+            groovyMap.add(Script.SCRATCH_LOCATION, processContext.getScratchLocation());
+            groovyMap.add(Script.GATEWAY_ID, processContext.getGatewayId());
+            groovyMap.add(Script.GATEWAY_USER_NAME, processContext.getProcessModel().getUserName());
+            groovyMap.add(Script.APPLICATION_NAME, processContext.getApplicationInterfaceDescription().getApplicationName());
+
+            ComputeResourcePreference crp = getComputeResourcePreference(processContext);
+            if (isValid(crp.getAllocationProjectNumber())) {
+                groovyMap.add(Script.ACCOUNT_STRING, crp.getAllocationProjectNumber());
             }
-        }
+            groovyMap.add(Script.RESERVATION, getReservation(crp));
 
-        // NOTE: Give precedence to data comes with experiment
-        ComputationalResourceSchedulingModel scheduling = processModel.getProcessResourceSchedule();
-        if (scheduling != null) {
-            int totalNodeCount = scheduling.getNodeCount();
-            int totalCPUCount = scheduling.getTotalCPUCount();
+            // To make job name alpha numeric
+            groovyMap.add(Script.JOB_NAME, "A" + String.valueOf(generateJobName()));
+            groovyMap.add(Script.WORKING_DIR, processContext.getWorkingDir());
 
-            if (isValid(scheduling.getQueueName())) {
-                groovyMap.add(Script.QUEUE_NAME, scheduling.getQueueName());
-            }
-            if (totalNodeCount > 0) {
-                groovyMap.add(Script.NODES, totalNodeCount);
-            }
-            // qos per queue
-            String qoS = getQoS(crp.getQualityOfService(), scheduling.getQueueName());
-            if (qoS != null) {
-                groovyMap.add(Script.QUALITY_OF_SERVICE, qoS);
-            }
-            if (totalCPUCount > 0) {
-                int ppn = totalCPUCount / totalNodeCount;
-                groovyMap.add(Script.PROCESS_PER_NODE, ppn);
-                groovyMap.add(Script.CPU_COUNT, totalCPUCount);
-            }
-            // max wall time may be set before this level if jobsubmission task has wall time configured to this job,
-            // if so we ignore scheduling configuration.
-            if (scheduling.getWallTimeLimit() > 0 && groovyMap.get(Script.MAX_WALL_TIME) == null) {
-                groovyMap.add(Script.MAX_WALL_TIME,
-                        GFacUtils.maxWallTimeCalculator(scheduling.getWallTimeLimit()));
-                if (resourceJobManager != null) {
-                    if (resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)) {
+            List<String> inputValues = getProcessInputValues(processModel.getProcessInputs());
+            inputValues.addAll(getProcessOutputValues(processModel.getProcessOutputs()));
+            groovyMap.add(Script.INPUTS, inputValues);
+
+            groovyMap.add(Script.USER_NAME, processContext.getJobSubmissionRemoteCluster().getServerInfo().getUserName());
+            groovyMap.add(Script.SHELL_NAME, "/bin/bash");
+            // get walltime
+            if (taskContext != null) {
+                try {
+                    JobSubmissionTaskModel jobSubmissionTaskModel = ((JobSubmissionTaskModel) taskContext.getSubTaskModel());
+                    if (jobSubmissionTaskModel.getWallTime() > 0) {
                         groovyMap.add(Script.MAX_WALL_TIME,
-                                GFacUtils.maxWallTimeCalculator(scheduling.getWallTimeLimit()));
+                                GFacUtils.maxWallTimeCalculator(jobSubmissionTaskModel.getWallTime()));
                     }
+                } catch (TException e) {
+                    log.error("Error while getting job submission sub task model", e);
                 }
             }
-            if (scheduling.getTotalPhysicalMemory() > 0) {
-                groovyMap.add(Script.USED_MEM, scheduling.getTotalPhysicalMemory());
-            }
-            if (isValid(scheduling.getOverrideLoginUserName())) {
-                groovyMap.add(Script.USER_NAME, scheduling.getOverrideLoginUserName());
-            }
-            if (isValid(scheduling.getOverrideAllocationProjectNumber())) {
-                groovyMap.add(Script.ACCOUNT_STRING, scheduling.getOverrideAllocationProjectNumber());
-            }
-            if (isValid(scheduling.getStaticWorkingDir())) {
-                groovyMap.add(Script.WORKING_DIR, scheduling.getStaticWorkingDir());
+
+            // NOTE: Give precedence to data that comes with the experiment
+            ComputationalResourceSchedulingModel scheduling = processModel.getProcessResourceSchedule();
+            if (scheduling != null) {
+                int totalNodeCount = scheduling.getNodeCount();
+                int totalCPUCount = scheduling.getTotalCPUCount();
+
+                if (isValid(scheduling.getQueueName())) {
+                    groovyMap.add(Script.QUEUE_NAME, scheduling.getQueueName());
+                }
+                if (totalNodeCount > 0) {
+                    groovyMap.add(Script.NODES, totalNodeCount);
+                }
+                // qos per queue
+                String qoS = getQoS(crp.getQualityOfService(), scheduling.getQueueName());
+                if (qoS != null) {
+                    groovyMap.add(Script.QUALITY_OF_SERVICE, qoS);
+                }
+                if (totalCPUCount > 0) {
+                    int ppn = totalCPUCount / totalNodeCount;
+                    groovyMap.add(Script.PROCESS_PER_NODE, ppn);
+                    groovyMap.add(Script.CPU_COUNT, totalCPUCount);
+                }
+                // max wall time may be set before this level if jobsubmission task has wall time configured to this job,
+                // if so we ignore scheduling configuration.
+                if (scheduling.getWallTimeLimit() > 0 && groovyMap.get(Script.MAX_WALL_TIME) == null) {
+                    groovyMap.add(Script.MAX_WALL_TIME,
+                            GFacUtils.maxWallTimeCalculator(scheduling.getWallTimeLimit()));
+                    if (resourceJobManager != null) {
+                        if (resourceJobManager.getResourceJobManagerType().equals(ResourceJobManagerType.LSF)) {
+                            groovyMap.add(Script.MAX_WALL_TIME,
+                                    GFacUtils.maxWallTimeCalculator(scheduling.getWallTimeLimit()));
+                        }
+                    }
+                }
+                if (scheduling.getTotalPhysicalMemory() > 0) {
+                    groovyMap.add(Script.USED_MEM, scheduling.getTotalPhysicalMemory());
+                }
+                if (isValid(scheduling.getOverrideLoginUserName())) {
+                    groovyMap.add(Script.USER_NAME, scheduling.getOverrideLoginUserName());
+                }
+                if (isValid(scheduling.getOverrideAllocationProjectNumber())) {
+                    groovyMap.add(Script.ACCOUNT_STRING, scheduling.getOverrideAllocationProjectNumber());
+                }
+                if (isValid(scheduling.getStaticWorkingDir())) {
+                    groovyMap.add(Script.WORKING_DIR, scheduling.getStaticWorkingDir());
+                }
+            } else {
+                log.error("Task scheduling cannot be null at this point..");
             }
-        } else {
-            log.error("Task scheduling cannot be null at this point..");
-        }
 
-        ApplicationDeploymentDescription appDepDescription = processContext.getApplicationDeploymentDescription();
-        List<CommandObject> moduleCmds = appDepDescription.getModuleLoadCmds();
-        if (moduleCmds != null) {
-            List<String> modulesCmdCollect = moduleCmds.stream()
-                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
-                    .map(map -> map.getCommand())
-                    .collect(Collectors.toList());
-            groovyMap.add(Script.MODULE_COMMANDS, modulesCmdCollect);
-        }
+            ApplicationDeploymentDescription appDepDescription = processContext.getApplicationDeploymentDescription();
+            List<CommandObject> moduleCmds = appDepDescription.getModuleLoadCmds();
+            if (moduleCmds != null) {
+                List<String> modulesCmdCollect = moduleCmds.stream()
+                        .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                        .map(map -> map.getCommand())
+                        .collect(Collectors.toList());
+                groovyMap.add(Script.MODULE_COMMANDS, modulesCmdCollect);
+            }
 
-        List<CommandObject> preJobCommands = appDepDescription.getPreJobCommands();
-        if (preJobCommands != null) {
-            List<String> preJobCmdCollect = preJobCommands.stream()
-                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
-                    .map(map -> parseCommands(map.getCommand(), groovyMap))
-                    .collect(Collectors.toList());
-            groovyMap.add(Script.PRE_JOB_COMMANDS, preJobCmdCollect);
-        }
+            List<CommandObject> preJobCommands = appDepDescription.getPreJobCommands();
+            if (preJobCommands != null) {
+                List<String> preJobCmdCollect = preJobCommands.stream()
+                        .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                        .map(map -> parseCommands(map.getCommand(), groovyMap))
+                        .collect(Collectors.toList());
+                groovyMap.add(Script.PRE_JOB_COMMANDS, preJobCmdCollect);
+            }
 
-        List<CommandObject> postJobCommands = appDepDescription.getPostJobCommands();
-        if (postJobCommands != null) {
-            List<String> postJobCmdCollect = postJobCommands.stream()
-                    .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
-                    .map(map -> parseCommands(map.getCommand(), groovyMap))
-                    .collect(Collectors.toList());
-            groovyMap.add(Script.POST_JOB_COMMANDS, postJobCmdCollect);
-        }
+            List<CommandObject> postJobCommands = appDepDescription.getPostJobCommands();
+            if (postJobCommands != null) {
+                List<String> postJobCmdCollect = postJobCommands.stream()
+                        .sorted((e1, e2) -> e1.getCommandOrder() - e2.getCommandOrder())
+                        .map(map -> parseCommands(map.getCommand(), groovyMap))
+                        .collect(Collectors.toList());
+                groovyMap.add(Script.POST_JOB_COMMANDS, postJobCmdCollect);
+            }
 
-        ApplicationParallelismType parallelism = appDepDescription.getParallelism();
-        Map<ApplicationParallelismType, String> parallelismPrefix = processContext.getResourceJobManager().getParallelismPrefix();
-        if (parallelism != null) {
-            if (parallelism != ApplicationParallelismType.SERIAL) {
-                if (parallelismPrefix != null){
-                    String parallelismCommand = parallelismPrefix.get(parallelism);
-                    if (parallelismCommand != null){
-                        groovyMap.add(Script.JOB_SUBMITTER_COMMAND, parallelismCommand);
-                    }else {
-                        throw new GFacException("Parallelism prefix is not defined for given parallelism type " + parallelism + ".. Please define the parallelism prefix at App Catalog");
+            ApplicationParallelismType parallelism = appDepDescription.getParallelism();
+            Map<ApplicationParallelismType, String> parallelismPrefix = processContext.getResourceJobManager().getParallelismPrefix();
+            if (parallelism != null) {
+                if (parallelism != ApplicationParallelismType.SERIAL) {
+                    if (parallelismPrefix != null){
+                        String parallelismCommand = parallelismPrefix.get(parallelism);
+                        if (parallelismCommand != null){
+                            groovyMap.add(Script.JOB_SUBMITTER_COMMAND, parallelismCommand);
+                        }else {
+                            throw new GFacException("Parallelism prefix is not defined for given parallelism type " + parallelism + ".. Please define the parallelism prefix at App Catalog");
+                        }
                     }
-                }
+                } // FIXME what if the type is SERIAL
             }
+        } catch (Exception e) {
+            log.error("Error while creating groovy map", e);
+            throw e;
         }
         return groovyMap;
     }
@@ -897,26 +902,51 @@ public class GFacUtils {
     }
 
     public static File createJobFile(GroovyMap groovyMap, TaskContext tc, JobManagerConfiguration jMC)
-            throws GFacException{
+            throws GFacException {
+        try {
+            int number = new SecureRandom().nextInt();
+            number = (number < 0 ? -number : number);
+            File tempJobFile = new File(GFacUtils.getLocalDataDir(tc), "job_" + Integer.toString(number) + jMC.getScriptExtension());
+            FileUtils.writeStringToFile(tempJobFile, generateScript(groovyMap, jMC.getJobDescriptionTemplateName()));
+            return tempJobFile;
+        } catch (IOException e) {
+            throw new GFacException("Error while writing script content to temp file");
+        }
+    }
 
-        URL templateUrl = ApplicationSettings.loadFile(jMC.getJobDescriptionTemplateName());
+    public static String generateScript(GroovyMap groovyMap, String templateName) throws GFacException {
+        URL templateUrl = ApplicationSettings.loadFile(templateName);
         if (templateUrl == null) {
-            String error = "System configuration file '" + jMC.getJobDescriptionTemplateName()
-                    + "' not found in the classpath";
+            String error = "Template file '" + templateName + "' not found";
             throw new GFacException(error);
         }
+        File template = new File(templateUrl.getPath());
+        TemplateEngine engine = new GStringTemplateEngine();
+        Writable make;
         try {
-            File template = new File(templateUrl.getPath());
-            TemplateEngine engine = new GStringTemplateEngine();
-            Writable make = engine.createTemplate(template).make(groovyMap);
+            make = engine.createTemplate(template).make(groovyMap);
+        } catch (Exception e) {
+            throw new GFacException("Error while generating script using groovy map");
+        }
+        return make.toString();
+    }
 
-            int number = new SecureRandom().nextInt();
-            number = (number < 0 ? -number : number);
-            File tempJobFile = new File(GFacUtils.getLocalDataDir(tc), "job_" + Integer.toString(number) + jMC.getScriptExtension());
-            FileUtils.writeStringToFile(tempJobFile, make.toString());
-            return tempJobFile;
-        } catch (ClassNotFoundException | IOException e) {
-            throw new GFacException("Error while parsing template and generating script file");
+    public static String getTemplateFileName(ResourceJobManagerType resourceJobManagerType) {
+        switch (resourceJobManagerType) {
+            case FORK:
+                return "FORK_Groovy.template";
+            case PBS:
+                return "PBS_Groovy.template";
+            case SLURM:
+                return "SLURM_Groovy.template";
+            case UGE:
+                return "UGE_Groovy.template";
+            case LSF:
+                return "LSF_Groovy.template";
+            case CLOUD:
+                return "CLOUD_Groovy.template";
+            default:
+                return null;
         }
     }
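
Taken together, getTemplateFileName and generateScript let a submission task resolve and render a script in memory instead of going through createJobFile; this mirrors what AuroraJobSubmission does further down. A minimal sketch, assuming a ProcessContext/TaskContext pair is already in hand:

    // Resolve the template for a cloud-managed resource and render it in memory;
    // batch schedulers keep using createJobFile(...) to persist the same output to disk.
    GroovyMap groovyMap = GFacUtils.createGroovyMap(processContext, taskContext);
    groovyMap.add(Script.JOB_SUBMITTER_COMMAND, "sh");
    String templateFileName = GFacUtils.getTemplateFileName(ResourceJobManagerType.CLOUD);
    String script = GFacUtils.generateScript(groovyMap, templateFileName);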
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GroovyMap.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GroovyMap.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GroovyMap.java
index 1abc878..a1f8132 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GroovyMap.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/GroovyMap.java
@@ -63,6 +63,15 @@ public class GroovyMap extends HashMap<String, Object> {
         return get(script.name);
     }
 
+    public String getStringValue(Script script) {
+        Object obj = get(script);
+        if (obj instanceof String) {
+            return ((String) obj);
+        }else {
+            throw new ClassCastException("Value is not of type String");
+        }
+    }
+
     private void addDefaultValues() {
         this.add(Script.SHELL_NAME, null)
                 .add(Script.QUEUE_NAME, null)
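
getStringValue gives callers a typed read with an explicit failure instead of the NullPointerException risk of get(...).toString(); DefaultJobSubmissionTask below switches to it for the job name. A minimal usage sketch:

    GroovyMap groovyMap = new GroovyMap();
    groovyMap.add(Script.JOB_NAME, "A1234567890");
    String jobName = groovyMap.getStringValue(Script.JOB_NAME); // "A1234567890"
    // A missing or non-String value throws ClassCastException rather than returning null.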

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
index 5e8de6d..2a5afc1 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/Factory.java
@@ -199,22 +199,25 @@ public abstract class Factory {
 					.getResourceJobManagerType().name());
 		}
 
+		String templateFileName = GFacUtils.getTemplateFileName(resourceJobManager.getResourceJobManagerType());
+
 		switch (resourceJobManager.getResourceJobManagerType()) {
 			case PBS:
-				return new PBSJobConfiguration("PBS_Groovy.template", ".pbs", resourceJobManager.getJobManagerBinPath(),
+				return new PBSJobConfiguration(templateFileName, ".pbs", resourceJobManager.getJobManagerBinPath(),
 						resourceJobManager.getJobManagerCommands(), outputParser);
 			case SLURM:
-				return new SlurmJobConfiguration("SLURM_Groovy.template", ".slurm", resourceJobManager
+				return new SlurmJobConfiguration(templateFileName, ".slurm", resourceJobManager
 						.getJobManagerBinPath(), resourceJobManager.getJobManagerCommands(), outputParser);
 			case LSF:
-				return new LSFJobConfiguration("LSF_Groovy.template", ".lsf", resourceJobManager.getJobManagerBinPath(),
+				return new LSFJobConfiguration(templateFileName, ".lsf", resourceJobManager.getJobManagerBinPath(),
 						resourceJobManager.getJobManagerCommands(), outputParser);
 			case UGE:
-				return new UGEJobConfiguration("UGE_Groovy.template", ".pbs", resourceJobManager.getJobManagerBinPath(),
+				return new UGEJobConfiguration(templateFileName, ".pbs", resourceJobManager.getJobManagerBinPath(),
 						resourceJobManager.getJobManagerCommands(), outputParser);
 			case FORK:
-				return new ForkJobConfiguration("FORK_Groovy.template", ".sh", resourceJobManager.getJobManagerBinPath(),
+				return new ForkJobConfiguration(templateFileName, ".sh", resourceJobManager.getJobManagerBinPath(),
 						resourceJobManager.getJobManagerCommands(), outputParser);
+            // We don't have a job configuration manager for CLOUD type
 			default:
 				return null;
 		}
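
Because the switch deliberately returns no JobManagerConfiguration for CLOUD, callers that previously assumed a non-null configuration need a guard. A hedged sketch of such a check, assuming the enclosing factory method is exposed as getJobManagerConfiguration (the variable names are hypothetical):

    JobManagerConfiguration jMC = Factory.getJobManagerConfiguration(resourceJobManager); // null for CLOUD
    if (jMC == null) {
        throw new GFacException("No job manager configuration for resource job manager type "
                + resourceJobManager.getResourceJobManagerType());
    }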

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/AuroraJobSubmission.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/AuroraJobSubmission.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/AuroraJobSubmission.java
index 0941c85..6a3d898 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/AuroraJobSubmission.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/AuroraJobSubmission.java
@@ -37,11 +37,14 @@ import org.apache.airavata.cloud.aurora.util.AuroraThriftClientUtil;
 import org.apache.airavata.common.utils.AiravataUtils;
 import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.GFacUtils;
+import org.apache.airavata.gfac.core.GroovyMap;
+import org.apache.airavata.gfac.core.Script;
 import org.apache.airavata.gfac.core.context.ProcessContext;
 import org.apache.airavata.gfac.core.context.TaskContext;
 import org.apache.airavata.gfac.core.task.JobSubmissionTask;
 import org.apache.airavata.gfac.core.task.TaskException;
 import org.apache.airavata.gfac.impl.AuroraUtils;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
 import org.apache.airavata.model.commons.ErrorModel;
 import org.apache.airavata.model.job.JobModel;
 import org.apache.airavata.model.status.JobState;
@@ -82,19 +85,15 @@ public class AuroraJobSubmission implements JobSubmissionTask{
         try {
             JobKeyBean jobKey = new JobKeyBean(AuroraUtils.ENVIRONMENT, AuroraUtils.ROLE, jobIdAndName);
             IdentityBean owner = new IdentityBean(AuroraUtils.ROLE);
-            // only autodoc vina
-            String workingDir = taskContext.getWorkingDir();
-//            ProcessBean proc1 = new ProcessBean("process_1", "mkdir -p " + workingDir, false);
-//            ProcessBean proc2 = new ProcessBean("process_2", "cp -rf /home/centos/efs-mount-point/autodock-vina/* " + workingDir , false);
-            String executablePath = processContext.getApplicationDeploymentDescription().getExecutablePath();
-            ProcessBean proc3 = new ProcessBean("process_3", "cd " + workingDir + " && sh " + executablePath, false);
-            Set<ProcessBean> processes = new LinkedHashSet<>();
-//            processes.add(proc1);
-//            processes.add(proc2);
-            processes.add(proc3);
+            GroovyMap groovyMap = GFacUtils.createGroovyMap(processContext, taskContext);
+            groovyMap.add(Script.JOB_SUBMITTER_COMMAND, "sh");
+            String templateFileName = GFacUtils.getTemplateFileName(ResourceJobManagerType.CLOUD);
+            String script = GFacUtils.generateScript(groovyMap, templateFileName);
+            ProcessBean process_1 = new ProcessBean("process_1", script, false);
 
+            Set<ProcessBean> processes = new LinkedHashSet<>();
+            processes.add(process_1);
             ResourceBean resources = new ResourceBean(1.5, 512, 512);
-
             TaskConfigBean taskConfig = new TaskConfigBean("Airavata-Aurora-" + jobIdAndName, processes, resources);
             JobConfigBean jobConfig = new JobConfigBean(jobKey, owner, taskConfig, AuroraUtils.CLUSTER);
 
@@ -115,8 +114,8 @@ public class AuroraJobSubmission implements JobSubmissionTask{
             GFacUtils.saveJobModel(processContext, jobModel);
             GFacUtils.saveJobStatus(processContext, jobModel);
             taskStatus.setReason("Successfully submitted job to Aurora");
-        } catch (Exception e) {
-            String msg = "Error occurred while submitting the job";
+        } catch (Throwable e) {
+            String msg = "Error occurred while submitting Aurora job";
             log.error(msg, e);
             taskStatus.setState(TaskState.FAILED);
             taskStatus.setReason(msg);

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/DefaultJobSubmissionTask.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/DefaultJobSubmissionTask.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/DefaultJobSubmissionTask.java
index 657de00..deabb95 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/DefaultJobSubmissionTask.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/DefaultJobSubmissionTask.java
@@ -73,7 +73,7 @@ public class DefaultJobSubmissionTask implements JobSubmissionTask {
 		    jobModel.setTaskId(taskContext.getTaskId());
 		    RemoteCluster remoteCluster = processContext.getJobSubmissionRemoteCluster();
 			GroovyMap groovyMap = GFacUtils.createGroovyMap(processContext, taskContext);
-			jobModel.setJobName(groovyMap.get(Script.JOB_NAME).toString());
+			jobModel.setJobName(groovyMap.getStringValue(Script.JOB_NAME));
 			ResourceJobManager resourceJobManager = GFacUtils.getResourceJobManager(processContext);
 		    JobManagerConfiguration jConfig = null;
 		    if (resourceJobManager != null) {
@@ -278,8 +278,16 @@ public class DefaultJobSubmissionTask implements JobSubmissionTask {
 		    errorModel.setActualErrorMessage(e.getMessage());
 		    errorModel.setUserFriendlyMessage(msg);
 		    taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
-	    } catch (RegistryException e) {
-            e.printStackTrace();
+	    } catch (Throwable e) {
+			String msg = "JobSubmission failed";
+			log.error(msg, e);
+			taskStatus.setState(TaskState.FAILED);
+			taskStatus.setReason(msg);
+			taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
+			ErrorModel errorModel = new ErrorModel();
+			errorModel.setActualErrorMessage(e.getMessage());
+			errorModel.setUserFriendlyMessage(msg);
+			taskContext.getTaskModel().setTaskErrors(Arrays.asList(errorModel));
         }
 
         taskContext.setTaskStatus(taskStatus);

http://git-wip-us.apache.org/repos/asf/airavata/blob/9f3810ef/thrift-interface-descriptions/data-models/resource-catalog-models/compute_resource_model.thrift
----------------------------------------------------------------------
diff --git a/thrift-interface-descriptions/data-models/resource-catalog-models/compute_resource_model.thrift b/thrift-interface-descriptions/data-models/resource-catalog-models/compute_resource_model.thrift
index 5ca2728..5d15c19 100644
--- a/thrift-interface-descriptions/data-models/resource-catalog-models/compute_resource_model.thrift
+++ b/thrift-interface-descriptions/data-models/resource-catalog-models/compute_resource_model.thrift
@@ -52,7 +52,8 @@ enum ResourceJobManagerType {
     PBS,
     SLURM,
     LSF,
-    UGE
+    UGE,
+    CLOUD
 }
 
 /**
@@ -287,10 +288,11 @@ enum ProviderName {
 struct CloudJobSubmission {
     1: required string jobSubmissionInterfaceId = airavata_commons.DEFAULT_ID,
     2: required data_movement_models.SecurityProtocol securityProtocol,
-    3: required string nodeId,
-    4: required string executableType,
-    5: required ProviderName providerName,
-    6: required string userAccountName
+    3: required ResourceJobManagerType jobManagerType,
+    4: required string nodeId,
+    5: required string executableType,
+    6: required ProviderName providerName,
+    7: required string userAccountName
 }
 
 /**