Posted to commits@airavata.apache.org by ra...@apache.org on 2014/08/07 22:33:07 UTC

[1/2] git commit: Fixed AIRAVATA-1371 and AIRAVATA-1367.

Repository: airavata
Updated Branches:
  refs/heads/master 7536fb689 -> d6ffefa81


Fixed AIRAVATA-1371 and AIRAVATA-1367. 

Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/c29a6a9b
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/c29a6a9b
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/c29a6a9b

Branch: refs/heads/master
Commit: c29a6a9be0dfad105cb8b741bad88b5135ddd39d
Parents: 3f21634
Author: raminder <ra...@apache.org>
Authored: Thu Aug 7 16:32:15 2014 -0400
Committer: raminder <ra...@apache.org>
Committed: Thu Aug 7 16:32:15 2014 -0400

----------------------------------------------------------------------
 .../samples/CreateLaunchExperimentUS3.java      | 142 ++++++++++-
 .../client/tools/RegisterUS3Application.java    | 105 +++++++--
 .../server/src/main/resources/SGETemplate.xslt  |   4 +-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 235 +++++++++++--------
 .../apache/airavata/gfac/core/cpi/GFacImpl.java |  21 +-
 .../gfac/gsissh/util/GFACGSISSHUtils.java       |   2 +-
 .../gsissh/src/main/resources/SGETemplate.xslt  |   4 +-
 7 files changed, 369 insertions(+), 144 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
index 25f27dc..b8aebc8 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
@@ -60,7 +60,9 @@ public class CreateLaunchExperimentUS3 {
             System.out.println("API version is " + airavata.getAPIVersion());
 //            addDescriptors();
 //            final String expId = createUS3ExperimentForTrestles(airavata);
-            final String expId = createUS3ExperimentForStampede(airavata);
+//            final String expId = createUS3ExperimentForStampede(airavata);
+//            final String expId = createUS3ExperimentForLonestar(airavata);
+            final String expId =  createUS3ExperimentForAlamo(airavata);
             System.out.println("Experiment ID : " + expId);
             launchExperiment(airavata, expId);
             System.out.println("Launched successfully");
@@ -200,8 +202,8 @@ public class CreateLaunchExperimentUS3 {
             List<DataObjectType> exOut = new ArrayList<DataObjectType>();
             DataObjectType output = new DataObjectType();
             output.setKey("output");
-//           output.setType(DataType.URI);
-           output.setValue("");
+            output.setType(DataType.URI);
+            output.setValue("");
 //            DataObjectType output1 = new DataObjectType();
 //            output1.setKey("stdout");
 //            output1.setType(DataType.STDOUT);
@@ -217,13 +219,13 @@ public class CreateLaunchExperimentUS3 {
            // Project project = ProjectModelUtil.createProject("ultrascan", "ultrascan", "test project");
             //String projectId = client.createProject(project);
 
-            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentTrestles", "US3AppTrestles", "ultrascan_68d397d9-ffc2-470e-bdf7-8d7b4f1cab2e", exInputs);
+            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentTrestles", "US3AppTrestles", "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec", exInputs);
             simpleExperiment.setExperimentOutputs(exOut);
 
-            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("trestles.sdsc.xsede.org_fcf21cd7-d8ce-4359-bd7e-49062ce80265", 32, 2, 0, "shared", 30, 0, 0, "uot111");
+            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("trestles.sdsc.xsede.org_1ccc526f-ab74-4a5a-970a-c464cb9def5a", 32, 2, 0, "shared", 30, 0, 0, "uot111");
             UserConfigurationData userConfigurationData = new UserConfigurationData();
             
-            scheduling.setResourceHostId("trestles.sdsc.xsede.org_fcf21cd7-d8ce-4359-bd7e-49062ce80265");
+            scheduling.setResourceHostId("trestles.sdsc.xsede.org_1ccc526f-ab74-4a5a-970a-c464cb9def5a");
             userConfigurationData.setAiravataAutoSchedule(false);
             userConfigurationData.setOverrideManualScheduledParams(false);
         
@@ -277,12 +279,134 @@ public class CreateLaunchExperimentUS3 {
 //            Project project = ProjectModelUtil.createProject("project1", "admin", "test project");
 //            String projectId = client.createProject(project);
 
-            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentStampede", "US3AppStampede", "ultrascan_68d397d9-ffc2-470e-bdf7-8d7b4f1cab2e", exInputs);
+            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentStampede", "US3AppStampede", "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec", exInputs);
+            simpleExperiment.setExperimentOutputs(exOut);
+
+            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("stampede.tacc.xsede.org_af57850b-103b-49a1-aab2-27cb070d3bd9", 16, 2, 0, "normal", 30, 0, 0, "TG-MCB070039N");
+
+            scheduling.setResourceHostId("stampede.tacc.xsede.org_af57850b-103b-49a1-aab2-27cb070d3bd9");
+            UserConfigurationData userConfigurationData = new UserConfigurationData();
+           
+            userConfigurationData.setAiravataAutoSchedule(false);
+            userConfigurationData.setOverrideManualScheduledParams(false);
+            userConfigurationData.setComputationalResourceScheduling(scheduling);
+        
+            AdvancedOutputDataHandling dataHandling = new AdvancedOutputDataHandling();
+            dataHandling.setOutputDataDir("/home/airavata/output/");
+            userConfigurationData.setAdvanceOutputDataHandling(dataHandling);
+        
+            simpleExperiment.setUserConfigurationData(userConfigurationData);
+            return client.createExperiment(simpleExperiment);
+        } catch (AiravataSystemException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new AiravataSystemException(e);
+        } catch (InvalidRequestException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new InvalidRequestException(e);
+        } catch (AiravataClientException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new AiravataClientException(e);
+        }catch (TException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new TException(e);
+        }
+    }
+    public static String createUS3ExperimentForLonestar (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
+        try{
+            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
+            DataObjectType input = new DataObjectType();
+            input.setKey("input");
+            input.setType(DataType.URI);
+            input.setValue("file:///home/airavata/input/hpcinput.tar");
+            exInputs.add(input);
+
+            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
+            DataObjectType output = new DataObjectType();
+            output.setKey("output");
+            output.setType(DataType.URI);
+            output.setValue("");
+//            DataObjectType output1 = new DataObjectType();
+//            output1.setKey("stdout");
+//            output1.setType(DataType.STDOUT);
+//            output1.setValue("");
+//            DataObjectType output2 = new DataObjectType();
+//            output2.setKey("stderr");
+//            output2.setType(DataType.STDERR);
+//            output2.setValue("");
+            exOut.add(output);
+//            exOut.add(output1);
+//            exOut.add(output2);
+
+//            Project project = ProjectModelUtil.createProject("project1", "admin", "test project");
+//            String projectId = client.createProject(project);
+
+            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentLonestar", "US3AppLonestar", "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec", exInputs);
+            simpleExperiment.setExperimentOutputs(exOut);
+
+            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("lonestar.tacc.teragrid.org_2e0273bc-324b-419b-9786-38a360d44772", 12, 2, 0, "normal", 30, 0, 0, "ULTRASCAN");
+
+            scheduling.setResourceHostId("lonestar.tacc.teragrid.org_2e0273bc-324b-419b-9786-38a360d44772");
+            UserConfigurationData userConfigurationData = new UserConfigurationData();
+           
+            userConfigurationData.setAiravataAutoSchedule(false);
+            userConfigurationData.setOverrideManualScheduledParams(false);
+            userConfigurationData.setComputationalResourceScheduling(scheduling);
+        
+            AdvancedOutputDataHandling dataHandling = new AdvancedOutputDataHandling();
+            dataHandling.setOutputDataDir("/home/airavata/output/");
+            userConfigurationData.setAdvanceOutputDataHandling(dataHandling);
+        
+            simpleExperiment.setUserConfigurationData(userConfigurationData);
+            return client.createExperiment(simpleExperiment);
+        } catch (AiravataSystemException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new AiravataSystemException(e);
+        } catch (InvalidRequestException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new InvalidRequestException(e);
+        } catch (AiravataClientException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new AiravataClientException(e);
+        }catch (TException e) {
+            logger.error("Error occured while creating the experiment...", e.getMessage());
+            throw new TException(e);
+        }
+    }
+    public static String createUS3ExperimentForAlamo (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
+        try{
+            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
+            DataObjectType input = new DataObjectType();
+            input.setKey("input");
+            input.setType(DataType.URI);
+            input.setValue("file:///home/airavata/input/hpcinput.tar");
+            exInputs.add(input);
+
+            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
+            DataObjectType output = new DataObjectType();
+            output.setKey("output");
+            output.setType(DataType.URI);
+            output.setValue("");
+//            DataObjectType output1 = new DataObjectType();
+//            output1.setKey("stdout");
+//            output1.setType(DataType.STDOUT);
+//            output1.setValue("");
+//            DataObjectType output2 = new DataObjectType();
+//            output2.setKey("stderr");
+//            output2.setType(DataType.STDERR);
+//            output2.setValue("");
+            exOut.add(output);
+//            exOut.add(output1);
+//            exOut.add(output2);
+
+//            Project project = ProjectModelUtil.createProject("project1", "admin", "test project");
+//            String projectId = client.createProject(project);
+
+            Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment("ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan", "US3ExperimentStampede", "US3AppStampede", "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec", exInputs);
             simpleExperiment.setExperimentOutputs(exOut);
 
-            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("stampede.tacc.xsede.org_7e291aa8-319a-4c70-a3b5-b2e6f91c8f5d", 16, 2, 0, "normal", 30, 0, 0, "TG-MCB070039N");
+            ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling("alamo.uthscsa.edu_7b6cf99a-af2e-4e8b-9eff-998a5ef60fe5", 4, 2, 0, "default", 30, 0, 0, null);
 
-            scheduling.setResourceHostId("stampede.tacc.xsede.org_7e291aa8-319a-4c70-a3b5-b2e6f91c8f5d");
+            scheduling.setResourceHostId("alamo.uthscsa.edu_7b6cf99a-af2e-4e8b-9eff-998a5ef60fe5");
             UserConfigurationData userConfigurationData = new UserConfigurationData();
            
             userConfigurationData.setAiravataAutoSchedule(false);
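
Note: the new createUS3ExperimentForLonestar and createUS3ExperimentForAlamo samples follow the same pattern as the existing Trestles and Stampede ones. Below is a condensed sketch of that pattern; it assumes the class context and imports of CreateLaunchExperimentUS3, and the project, application and host ids are the sample values registered by RegisterUS3Application, so they will differ in other deployments.

    // Declare one URI input and one (empty) URI output, as in the samples above.
    List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
    DataObjectType input = new DataObjectType();
    input.setKey("input");
    input.setType(DataType.URI);
    input.setValue("file:///home/airavata/input/hpcinput.tar");
    exInputs.add(input);

    List<DataObjectType> exOut = new ArrayList<DataObjectType>();
    DataObjectType output = new DataObjectType();
    output.setKey("output");
    output.setType(DataType.URI);
    output.setValue("");
    exOut.add(output);

    // Create the experiment against a registered application interface
    // (arguments as used in the samples: project id, user, experiment name,
    // description, application interface id, inputs).
    Experiment simpleExperiment = ExperimentModelUtil.createSimpleExperiment(
            "ultrascan_41574ef5-b054-4d03-ab20-2cfe768d5096", "ultrascan",
            "US3ExperimentAlamo", "US3AppAlamo",
            "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec", exInputs);
    simpleExperiment.setExperimentOutputs(exOut);

    // Pin the job to one registered compute resource and disable auto-scheduling.
    ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(
            "alamo.uthscsa.edu_7b6cf99a-af2e-4e8b-9eff-998a5ef60fe5", 4, 2, 0, "default", 30, 0, 0, null);
    scheduling.setResourceHostId("alamo.uthscsa.edu_7b6cf99a-af2e-4e8b-9eff-998a5ef60fe5");

    UserConfigurationData userConfigurationData = new UserConfigurationData();
    userConfigurationData.setAiravataAutoSchedule(false);
    userConfigurationData.setOverrideManualScheduledParams(false);
    userConfigurationData.setComputationalResourceScheduling(scheduling);

    AdvancedOutputDataHandling dataHandling = new AdvancedOutputDataHandling();
    dataHandling.setOutputDataDir("/home/airavata/output/");
    userConfigurationData.setAdvanceOutputDataHandling(dataHandling);
    simpleExperiment.setUserConfigurationData(userConfigurationData);

    String expId = client.createExperiment(simpleExperiment);
    launchExperiment(client, expId);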

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterUS3Application.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterUS3Application.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterUS3Application.java
index 1339e29..e98aa07 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterUS3Application.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterUS3Application.java
@@ -1,5 +1,7 @@
 package org.apache.airavata.client.tools;
 
+import java.math.BigInteger;
+import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -35,13 +37,13 @@ public class RegisterUS3Application {
     private static Airavata.Client airavataClient;
 
     //Host Id's
-    private static String stampedeResourceId = "stampede.tacc.xsede.org_7e291aa8-319a-4c70-a3b5-b2e6f91c8f5d";
-    private static String trestlesResourceId = "trestles.sdsc.xsede.org_fcf21cd7-d8ce-4359-bd7e-49062ce80265";
-    private static String lonestarResourceId = "lonestar.tacc.teragrid.org_fe9b698a-4f68-459b-acc4-09c457a769b2";
-    private static String alamoResourceId = "alamo.uthscsa.edu_2ff1f99d-565a-48c6-9f7d-a2dc5019d238";
+    private static String stampedeResourceId = "stampede.tacc.xsede.org_af57850b-103b-49a1-aab2-27cb070d3bd9";
+    private static String trestlesResourceId = "trestles.sdsc.xsede.org_1ccc526f-ab74-4a5a-970a-c464cb9def5a";
+    private static String lonestarResourceId = "lonestar.tacc.teragrid.org_2e0273bc-324b-419b-9786-38a360d44772";
+    private static String alamoResourceId = "alamo.uthscsa.edu_7b6cf99a-af2e-4e8b-9eff-998a5ef60fe5";
     
-    private static String ultrascanModuleId = "ultrascan_c393abae-f75d-442f-8ad3-f0b59376131a";
-    private static String ultrascanAppId = "ultrascan_68d397d9-ffc2-470e-bdf7-8d7b4f1cab2e";
+    private static String ultrascanModuleId = "ultrascan_f8e80bc9-c0da-48d8-bb2a-30fe7e3fbab6";
+    private static String ultrascanAppId = "ultrascan_e76ab5cf-79f6-44df-a244-10a734183fec";
     
     public static void main(String[] args) {
         try {
@@ -49,18 +51,23 @@ public class RegisterUS3Application {
 			System.out.println("API version is " + airavataClient.getAPIVersion());
 
 			//Register all compute hosts
-//			registerXSEDEHosts();
+			registerXSEDEHosts();
 			
+			// Register Gateway Details
+			registerGatewayResourceProfile();
+		
 			//Register module
-//			registerAppModules();
+			registerAppModules();
 			
 			//Register Application
-//			registerUltrascanInterface();
+			registerUltrascanInterface();
 			
 			//Register Deployment 
-//			registerApplicationDeployment();
-			registerGatewayResourceProfile();
-			
+			registerApplicationDeployment();
+	
+			// update 
+//			updateXSEDEHosts();
+
         } catch (Exception e) {
             logger.error("Error while connecting with server", e.getMessage());
             e.printStackTrace();
@@ -73,22 +80,51 @@ public class RegisterUS3Application {
 
             //Register Stampede
             stampedeResourceId = registerComputeHost("stampede.tacc.xsede.org", "TACC Stampede Cluster",
-                    ResourceJobManagerType.SLURM, "push", "/usr/bin", SecurityProtocol.GSI, 2222, null);
+                    ResourceJobManagerType.SLURM, "push", "/usr/bin", SecurityProtocol.GSI, 2222, "/usr/local/bin/ibrun");
             System.out.println("Stampede Resource Id is " + stampedeResourceId);
 
             //Register Trestles
             trestlesResourceId = registerComputeHost("trestles.sdsc.xsede.org", "SDSC Trestles Cluster",
-                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.GSI, 22, null);
+                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.GSI, 22, "/opt/mvapich2/pgi/ib/bin/mpiexec -np");
             System.out.println("Trestles Resource Id is " + trestlesResourceId);
             
             //Register Lonestar
             lonestarResourceId = registerComputeHost("lonestar.tacc.teragrid.org", "TACC Lonestar Cluster",
-                    ResourceJobManagerType.UGE, "push", "/opt/torque/bin/", SecurityProtocol.GSI, 22, null);
+                    ResourceJobManagerType.UGE, "push", "/opt/sge6.2/bin/lx24-amd64", SecurityProtocol.GSI, 22, "/sge_common/default/pe_scripts/ibrun");
             System.out.println("Lonestar Resource Id is " + lonestarResourceId);
 
             //Register Alamo
             alamoResourceId = registerComputeHost("alamo.uthscsa.edu", "Alamo Cluster",
-                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.SSH_KEYS, 22, null);
+                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.SSH_KEYS, 22, "/share/apps/openmpi/bin/mpiexec - n" );
+            System.out.println("Alamo Cluster " + alamoResourceId);
+
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+
+    }
+    public static void updateXSEDEHosts() {
+        try {
+            System.out.println("\n #### Registering XSEDE Computational Resources #### \n");
+
+            //Register Stampede
+            stampedeResourceId = updateComputeHost( stampedeResourceId,"stampede.tacc.xsede.org", "TACC Stampede Cluster",
+                    ResourceJobManagerType.SLURM, "push", "/usr/bin", SecurityProtocol.GSI, 2222, "/usr/local/bin/ibrun");
+            System.out.println("Stampede Resource Id is " + stampedeResourceId);
+
+            //Register Trestles
+            trestlesResourceId = updateComputeHost(trestlesResourceId,"trestles.sdsc.xsede.org", "SDSC Trestles Cluster",
+                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.GSI, 22, "/opt/mvapich2/pgi/ib/bin/mpiexec -np");
+            System.out.println("Trestles Resource Id is " + trestlesResourceId);
+            
+            //Register Lonestar
+            lonestarResourceId = updateComputeHost(lonestarResourceId,"lonestar.tacc.teragrid.org", "TACC Lonestar Cluster",
+                    ResourceJobManagerType.UGE, "push", "/opt/sge6.2/bin/lx24-amd64", SecurityProtocol.GSI, 22, "/sge_common/default/pe_scripts/ibrun");
+            System.out.println("Lonestar Resource Id is " + lonestarResourceId);
+
+            //Register Alamo
+            alamoResourceId = updateComputeHost(alamoResourceId,"alamo.uthscsa.edu", "Alamo Cluster",
+                    ResourceJobManagerType.PBS, "push", "/opt/torque/bin/", SecurityProtocol.SSH_KEYS, 22, "/share/apps/openmpi/bin/mpiexec -n");
             System.out.println("Alamo Cluster " + alamoResourceId);
 
         } catch (TException e) {
@@ -184,7 +220,7 @@ public class RegisterUS3Application {
                             "/scratch/01623/us3/airavata/");
             
             ComputeResourcePreference alamoResourcePreferences = RegisterSampleApplicationsUtils.
-                    createComputeResourcePreference(lonestarResourceId, null, false, null, null, null,
+                    createComputeResourcePreference(alamoResourceId, null, false, null, null, null,
                             "/mnt/glusterfs/work/");
 
             GatewayResourceProfile gatewayResourceProfile = new GatewayResourceProfile();
@@ -221,7 +257,6 @@ public class RegisterUS3Application {
 			jobManagerCommandStringMap.put(JobManagerCommand.SUBMISSION, jobManagerCommand);
 			resourceJobManager.setJobManagerCommands(jobManagerCommandStringMap);
 		}
-
 		SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
 		sshJobSubmission.setResourceJobManager(resourceJobManager);
 		sshJobSubmission.setSecurityProtocol(securityProtocol);
@@ -241,4 +276,38 @@ public class RegisterUS3Application {
 
 		return computeResourceId;
 	}
+	public static String updateComputeHost( String computeResourceId, String hostName, String hostDesc, ResourceJobManagerType resourceJobManagerType, String monitoringEndPoint,
+			String jobMangerBinPath, SecurityProtocol securityProtocol, int portNumber, String jobManagerCommand) throws TException {
+
+		if (computeResourceId.isEmpty())
+			throw new AiravataClientException();
+
+		ResourceJobManager resourceJobManager = RegisterSampleApplicationsUtils.createResourceJobManager(resourceJobManagerType, monitoringEndPoint,
+				jobMangerBinPath, null);
+
+		if (jobManagerCommand != null) {
+			Map<JobManagerCommand, String> jobManagerCommandStringMap = new HashMap<JobManagerCommand, String>();
+			jobManagerCommandStringMap.put(JobManagerCommand.SUBMISSION, jobManagerCommand);
+			resourceJobManager.setJobManagerCommands(jobManagerCommandStringMap);
+		}
+		SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
+		sshJobSubmission.setResourceJobManager(resourceJobManager);
+		sshJobSubmission.setSecurityProtocol(securityProtocol);
+		sshJobSubmission.setSshPort(portNumber);
+		boolean sshAddStatus = airavataClient.addSSHJobSubmissionDetails(computeResourceId, 1, sshJobSubmission);
+		ComputeResourceDescription computeResourceDescription = airavataClient.getComputeResource(computeResourceId);
+		computeResourceDescription.getJobSubmissionInterfacesIterator();
+		if (!sshAddStatus)
+			throw new AiravataClientException();
+
+		SCPDataMovement scpDataMovement = new SCPDataMovement();
+		scpDataMovement.setSecurityProtocol(securityProtocol);
+		scpDataMovement.setSshPort(portNumber);
+		boolean scpAddStatus = airavataClient.addSCPDataMovementDetails(computeResourceId, 1, scpDataMovement);
+
+		if (!scpAddStatus)
+			throw new AiravataClientException();
+
+		return computeResourceId;
+	}
 }
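
For reference, the new updateComputeHost helper is driven from updateXSEDEHosts and takes an existing compute resource id as its first argument. A single call, using the sample Stampede values above (the id will differ per app catalog), looks like:

    stampedeResourceId = updateComputeHost(stampedeResourceId,
            "stampede.tacc.xsede.org", "TACC Stampede Cluster",
            ResourceJobManagerType.SLURM, "push", "/usr/bin",
            SecurityProtocol.GSI, 2222, "/usr/local/bin/ibrun");

Judging by the API calls it makes (addSSHJobSubmissionDetails, addSCPDataMovementDetails), it attaches fresh SSH job submission and SCP data movement interfaces to the existing resource rather than editing the ones already registered.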

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/modules/configuration/server/src/main/resources/SGETemplate.xslt
----------------------------------------------------------------------
diff --git a/modules/configuration/server/src/main/resources/SGETemplate.xslt b/modules/configuration/server/src/main/resources/SGETemplate.xslt
index 90fbf01..3b8557c 100644
--- a/modules/configuration/server/src/main/resources/SGETemplate.xslt
+++ b/modules/configuration/server/src/main/resources/SGETemplate.xslt
@@ -8,7 +8,7 @@
 <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
 <xsl:output method="text" />
 <xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
+#! /bin/bash
 # Grid Engine batch job script built by Apache Airavata
 #   <xsl:choose>
     <xsl:when test="ns:shellName">
@@ -52,7 +52,7 @@
     </xsl:choose>
     <xsl:choose>
     <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#$ -pe <xsl:value-of select="ns:nodes"/>way 12*<xsl:value-of select="ns:processesPerNode"/>
+#$ -pe <xsl:value-of select="ns:processesPerNode"/>way <xsl:value-of select="12 * ns:nodes"/>
 <xsl:text>&#xa;</xsl:text>
     </xsl:when>
     </xsl:choose>
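
A worked example of the -pe fix, assuming a job description with 2 nodes and 12 processes per node: the old template rendered the literal text "#$ -pe 2way 12*12", which is not a valid slot specification, while the corrected template evaluates the product and renders "#$ -pe 12way 24". That is, the wayness now comes from processesPerNode and the slot count is 12 * nodes, matching the Nway parallel environments used on 12-core SGE clusters such as Lonestar.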

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 30e5c0a..86d1960 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -53,9 +53,11 @@ import org.apache.airavata.gfac.core.handler.GFacHandlerConfig;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.handler.GFacRecoverableHandler;
 import org.apache.airavata.gfac.core.handler.ThreadedHandler;
+import org.apache.airavata.gfac.core.monitor.ExperimentIdentity;
 import org.apache.airavata.gfac.core.monitor.JobIdentity;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.core.monitor.TaskIdentity;
+import org.apache.airavata.gfac.core.monitor.state.ExperimentStatusChangedEvent;
 import org.apache.airavata.gfac.core.monitor.state.GfacExperimentStateChangeRequest;
 import org.apache.airavata.gfac.core.monitor.state.JobStatusChangeRequest;
 import org.apache.airavata.gfac.core.monitor.state.TaskStatusChangeRequest;
@@ -83,6 +85,7 @@ import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePrefer
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
 import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
+import org.apache.airavata.model.workspace.experiment.ExperimentState;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
 import org.apache.airavata.model.workspace.experiment.TaskState;
@@ -525,108 +528,136 @@ public class BetterGfacImpl implements GFac {
         return true;
     }
 
-    private void reLaunch(JobExecutionContext jobExecutionContext, int stateVal) throws GFacException {
-        // Scheduler will decide the execution flow of handlers and provider which handles
-        // the job.
-        String experimentID = jobExecutionContext.getExperimentID();
-        try {
-            Scheduler.schedule(jobExecutionContext);
-
-            // Executing in handlers in the order as they have configured in GFac configuration
-            // here we do not skip handler if some handler does not have to be run again during re-run it can implement
-            // that logic in to the handler
-            reInvokeInFlowHandlers(jobExecutionContext);
-
-            // After executing the in handlers provider instance should be set to job execution context.
-            // We get the provider instance and execute it.
-            if (stateVal == 2 || stateVal == 3) {
-                invokeProvider(jobExecutionContext);     // provider never ran in previous invocation
-            } else if (stateVal == 4) {   // whether sync or async job have to invoke the recovering because it crashed in the Handler
-                reInvokeProvider(jobExecutionContext);
-            } else if (stateVal >= 5 && GFacUtils.isSynchronousMode(jobExecutionContext)) {
-                // In this case we do nothing because provider ran successfully, no need to re-run the job
-                log.info("Provider does not have to be recovered because it ran successfully for experiment: " + experimentID);
-            } else if (stateVal == 5 && !GFacUtils.isSynchronousMode(jobExecutionContext)) {
-                // this is async mode where monitoring of jobs is hapenning, we have to recover
-                reInvokeProvider(jobExecutionContext);
-            } else if (stateVal == 6) {
-                reInvokeOutFlowHandlers(jobExecutionContext);
-            } else {
-                log.info("We skip invoking Handler, because the experiment:" + stateVal + " state is beyond the Provider Invocation !!!");
-                log.info("ExperimentId: " + experimentID + " taskId: " + jobExecutionContext.getTaskData().getTaskID());
-            }
-        } catch (Exception e) {
-            try {
-                // we make the experiment as failed due to exception scenario
-                monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.FAILED));
-//                monitorPublisher.publish(new
-//                        ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()),
-//                        ExperimentState.FAILED));
-                // Updating the task status if there's any task associated
-//                monitorPublisher.publish(new TaskStatusChangedEvent(
-//                        new TaskIdentity(jobExecutionContext.getExperimentID(),
-//                                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-//                                jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED
-//                ));
-                monitorPublisher.publish(new JobStatusChangeRequest(new MonitorID(jobExecutionContext),
-                        new JobIdentity(jobExecutionContext.getExperimentID(),
-                                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-                                jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getJobDetails().getJobID()), JobState.FAILED
-                ));
-            } catch (NullPointerException e1) {
-                log.error("Error occured during updating the statuses of Experiments,tasks or Job statuses to failed, " +
-                        "NullPointerException occurred because at this point there might not have Job Created", e1, e);
-            }
-            jobExecutionContext.setProperty(ERROR_SENT, "true");
-            jobExecutionContext.getNotifier().publish(new ExecutionFailEvent(e.getCause()));
-            throw new GFacException(e.getMessage(), e);
-        }
-    }
-
-    private void launch(JobExecutionContext jobExecutionContext) throws GFacException {
-        // Scheduler will decide the execution flow of handlers and provider which handles
-        // the job.
-        try {
-            Scheduler.schedule(jobExecutionContext);
-
-            // Executing in handlers in the order as they have configured in GFac configuration
-            // here we do not skip handler if some handler does not have to be run again during re-run it can implement
-            // that logic in to the handler
-            invokeInFlowHandlers(jobExecutionContext);               // to keep the consistency we always try to re-run to avoid complexity
-            //            if (experimentID != null){
-            //                registry2.changeStatus(jobExecutionContext.getExperimentID(),AiravataJobState.State.INHANDLERSDONE);
-            //            }
-
-            // After executing the in handlers provider instance should be set to job execution context.
-            // We get the provider instance and execute it.
-            invokeProvider(jobExecutionContext);
-        } catch (Exception e) {
-            try {
-                // we make the experiment as failed due to exception scenario
-                monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.FAILED));
-//                monitorPublisher.publish(new
-//                        ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()),
-//                        ExperimentState.FAILED));
-                // Updating the task status if there's any task associated
-//                monitorPublisher.publish(new TaskStatusChangeRequest(
-//                        new TaskIdentity(jobExecutionContext.getExperimentID(),
-//                                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-//                                jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED
-//                ));
-                monitorPublisher.publish(new JobStatusChangeRequest(new MonitorID(jobExecutionContext),
-                        new JobIdentity(jobExecutionContext.getExperimentID(),
-                                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-                                jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getJobDetails().getJobID()), JobState.FAILED
-                ));
-            } catch (NullPointerException e1) {
-                log.error("Error occured during updating the statuses of Experiments,tasks or Job statuses to failed, " +
-                        "NullPointerException occurred because at this point there might not have Job Created", e1, e);
-            }
-            jobExecutionContext.setProperty(ERROR_SENT, "true");
-            jobExecutionContext.getNotifier().publish(new ExecutionFailEvent(e.getCause()));
-            throw new GFacException(e.getMessage(), e);
-        }
-    }
+	private void reLaunch(JobExecutionContext jobExecutionContext, int stateVal) throws GFacException {
+		// Scheduler will decide the execution flow of handlers and provider
+		// which handles
+		// the job.
+		String experimentID = jobExecutionContext.getExperimentID();
+		try {
+			Scheduler.schedule(jobExecutionContext);
+
+			// Executing in handlers in the order as they have configured in
+			// GFac configuration
+			// here we do not skip handler if some handler does not have to be
+			// run again during re-run it can implement
+			// that logic in to the handler
+			reInvokeInFlowHandlers(jobExecutionContext);
+
+			// After executing the in handlers provider instance should be set
+			// to job execution context.
+			// We get the provider instance and execute it.
+			if (stateVal == 2 || stateVal == 3) {
+				invokeProvider(jobExecutionContext); // provider never ran in
+														// previous invocation
+			} else if (stateVal == 4) { // whether sync or async job have to
+										// invoke the recovering because it
+										// crashed in the Handler
+				reInvokeProvider(jobExecutionContext);
+			} else if (stateVal >= 5 && GFacUtils.isSynchronousMode(jobExecutionContext)) {
+				// In this case we do nothing because provider ran successfully,
+				// no need to re-run the job
+				log.info("Provider does not have to be recovered because it ran successfully for experiment: " + experimentID);
+			} else if (stateVal == 5 && !GFacUtils.isSynchronousMode(jobExecutionContext)) {
+				// this is async mode where monitoring of jobs is hapenning, we
+				// have to recover
+				reInvokeProvider(jobExecutionContext);
+			} else if (stateVal == 6) {
+				reInvokeOutFlowHandlers(jobExecutionContext);
+			} else {
+				log.info("We skip invoking Handler, because the experiment:" + stateVal + " state is beyond the Provider Invocation !!!");
+				log.info("ExperimentId: " + experimentID + " taskId: " + jobExecutionContext.getTaskData().getTaskID());
+			}
+		} catch (Exception e) {
+			try {
+				// we make the experiment as failed due to exception scenario
+				monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.FAILED));
+				// monitorPublisher.publish(new
+				// ExperimentStatusChangedEvent(new
+				// ExperimentIdentity(jobExecutionContext.getExperimentID()),
+				// ExperimentState.FAILED));
+				// Updating the task status if there's any task associated
+				// monitorPublisher.publish(new TaskStatusChangedEvent(
+				// new TaskIdentity(jobExecutionContext.getExperimentID(),
+				// jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
+				// jobExecutionContext.getTaskData().getTaskID()),
+				// TaskState.FAILED
+				// ));
+				monitorPublisher.publish(new JobStatusChangeRequest(new MonitorID(jobExecutionContext), new JobIdentity(jobExecutionContext.getExperimentID(),
+						jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext
+								.getJobDetails().getJobID()), JobState.FAILED));
+			} catch (NullPointerException e1) {
+				log.error("Error occured during updating the statuses of Experiments,tasks or Job statuses to failed, "
+						+ "NullPointerException occurred because at this point there might not have Job Created", e1, e);
+				monitorPublisher
+						.publish(new ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()), ExperimentState.FAILED));
+				// Updating the task status if there's any task associated
+				monitorPublisher.publish(new TaskStatusChangedEvent(new TaskIdentity(jobExecutionContext.getExperimentID(), jobExecutionContext
+						.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED));
+
+			}
+			jobExecutionContext.setProperty(ERROR_SENT, "true");
+			jobExecutionContext.getNotifier().publish(new ExecutionFailEvent(e.getCause()));
+			throw new GFacException(e.getMessage(), e);
+		}
+	}
+
+	private void launch(JobExecutionContext jobExecutionContext) throws GFacException {
+		// Scheduler will decide the execution flow of handlers and provider
+		// which handles
+		// the job.
+		try {
+			Scheduler.schedule(jobExecutionContext);
+
+			// Executing in handlers in the order as they have configured in
+			// GFac configuration
+			// here we do not skip handler if some handler does not have to be
+			// run again during re-run it can implement
+			// that logic in to the handler
+			invokeInFlowHandlers(jobExecutionContext); // to keep the
+														// consistency we always
+														// try to re-run to
+														// avoid complexity
+			// if (experimentID != null){
+			// registry2.changeStatus(jobExecutionContext.getExperimentID(),AiravataJobState.State.INHANDLERSDONE);
+			// }
+
+			// After executing the in handlers provider instance should be set
+			// to job execution context.
+			// We get the provider instance and execute it.
+			invokeProvider(jobExecutionContext);
+		} catch (Exception e) {
+			try {
+				// we make the experiment as failed due to exception scenario
+				monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext), GfacExperimentState.FAILED));
+				// monitorPublisher.publish(new
+				// ExperimentStatusChangedEvent(new
+				// ExperimentIdentity(jobExecutionContext.getExperimentID()),
+				// ExperimentState.FAILED));
+				// Updating the task status if there's any task associated
+				// monitorPublisher.publish(new TaskStatusChangeRequest(
+				// new TaskIdentity(jobExecutionContext.getExperimentID(),
+				// jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
+				// jobExecutionContext.getTaskData().getTaskID()),
+				// TaskState.FAILED
+				// ));
+				monitorPublisher.publish(new JobStatusChangeRequest(new MonitorID(jobExecutionContext), new JobIdentity(jobExecutionContext.getExperimentID(),
+						jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext
+								.getJobDetails().getJobID()), JobState.FAILED));
+			} catch (NullPointerException e1) {
+				log.error("Error occured during updating the statuses of Experiments,tasks or Job statuses to failed, "
+						+ "NullPointerException occurred because at this point there might not have Job Created", e1, e);
+				monitorPublisher
+						.publish(new ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()), ExperimentState.FAILED));
+				// Updating the task status if there's any task associated
+				monitorPublisher.publish(new TaskStatusChangeRequest(new TaskIdentity(jobExecutionContext.getExperimentID(), jobExecutionContext
+						.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED));
+
+			}
+			jobExecutionContext.setProperty(ERROR_SENT, "true");
+			jobExecutionContext.getNotifier().publish(new ExecutionFailEvent(e.getCause()));
+			throw new GFacException(e.getMessage(), e);
+		}
+	}
 
     private void invokeProvider(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException, InterruptedException, KeeperException {
         GFacProvider provider = jobExecutionContext.getProvider();

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java
index 83fb43a..5c8d16f 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java
@@ -50,11 +50,14 @@ import org.apache.airavata.gfac.core.handler.GFacHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerConfig;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.handler.ThreadedHandler;
+import org.apache.airavata.gfac.core.monitor.ExperimentIdentity;
 import org.apache.airavata.gfac.core.monitor.JobIdentity;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.core.monitor.TaskIdentity;
+import org.apache.airavata.gfac.core.monitor.state.ExperimentStatusChangedEvent;
 import org.apache.airavata.gfac.core.monitor.state.JobStatusChangeRequest;
 import org.apache.airavata.gfac.core.monitor.state.TaskStatusChangeRequest;
+import org.apache.airavata.gfac.core.monitor.state.TaskStatusChangedEvent;
 import org.apache.airavata.gfac.core.notification.events.ExecutionFailEvent;
 import org.apache.airavata.gfac.core.notification.listeners.LoggingListener;
 import org.apache.airavata.gfac.core.notification.listeners.WorkflowTrackingListener;
@@ -64,6 +67,7 @@ import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
+import org.apache.airavata.model.workspace.experiment.ExperimentState;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
 import org.apache.airavata.model.workspace.experiment.TaskState;
@@ -327,16 +331,6 @@ public class GFacImpl implements GFac {
             }
         } catch (Exception e) {
             try {
-                // we make the experiment as failed due to exception scenario
-//                monitorPublisher.publish(new
-//                        ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()),
-//                        ExperimentState.FAILED));
-                // Updating the task status if there's any task associated
-//                monitorPublisher.publish(new TaskStatusChangedEvent(
-//                        new TaskIdentity(jobExecutionContext.getExperimentID(),
-//                                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
-//                                jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED
-//                ));
                 monitorPublisher.publish(new JobStatusChangeRequest(new MonitorID(jobExecutionContext),
                         new JobIdentity(jobExecutionContext.getExperimentID(),
                         jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
@@ -344,6 +338,13 @@ public class GFacImpl implements GFac {
             } catch (NullPointerException e1) {
                 log.error("Error occured during updating the statuses of Experiments,tasks or Job statuses to failed, " +
                         "NullPointerException occurred because at this point there might not have Job Created", e1, e);
+                // Updating status if job id is not set
+				monitorPublisher
+						.publish(new ExperimentStatusChangedEvent(new ExperimentIdentity(jobExecutionContext.getExperimentID()), ExperimentState.FAILED));
+				// Updating the task status if there's any task associated
+				monitorPublisher.publish(new TaskStatusChangedEvent(new TaskIdentity(jobExecutionContext.getExperimentID(), jobExecutionContext
+						.getWorkflowNodeDetails().getNodeInstanceId(), jobExecutionContext.getTaskData().getTaskID()), TaskState.FAILED));
+
             }
             jobExecutionContext.setProperty(ERROR_SENT, "true");
             jobExecutionContext.getNotifier().publish(new ExecutionFailEvent(e.getCause()));

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index e287e3a..5de902f 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -119,7 +119,7 @@ public class GFACGSISSHUtils {
         jobDescriptor.setStandardErrorFile(app.getStandardError());
         Random random = new Random();
         int i = random.nextInt(Integer.MAX_VALUE); // We always set the job name
-        jobDescriptor.setJobName(String.valueOf(i+99999999));
+        jobDescriptor.setJobName("A" + String.valueOf(i+99999999));
         jobDescriptor.setWorkingDirectory(app.getStaticWorkingDirectory());
 
         List<String> inputValues = new ArrayList<String>();
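
A likely motivation for the "A" prefix: Grid Engine does not accept job names that begin with a digit, and the previously generated name was purely numeric.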

http://git-wip-us.apache.org/repos/asf/airavata/blob/c29a6a9b/tools/gsissh/src/main/resources/SGETemplate.xslt
----------------------------------------------------------------------
diff --git a/tools/gsissh/src/main/resources/SGETemplate.xslt b/tools/gsissh/src/main/resources/SGETemplate.xslt
index 90fbf01..3b8557c 100644
--- a/tools/gsissh/src/main/resources/SGETemplate.xslt
+++ b/tools/gsissh/src/main/resources/SGETemplate.xslt
@@ -8,7 +8,7 @@
 <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:ns="http://airavata.apache.org/gsi/ssh/2012/12">
 <xsl:output method="text" />
 <xsl:template match="/ns:JobDescriptor">
-#! /bin/sh
+#! /bin/bash
 # Grid Engine batch job script built by Apache Airavata
 #   <xsl:choose>
     <xsl:when test="ns:shellName">
@@ -52,7 +52,7 @@
     </xsl:choose>
     <xsl:choose>
     <xsl:when test="(ns:nodes) and (ns:processesPerNode)">
-#$ -pe <xsl:value-of select="ns:nodes"/>way 12*<xsl:value-of select="ns:processesPerNode"/>
+#$ -pe <xsl:value-of select="ns:processesPerNode"/>way <xsl:value-of select="12 * ns:nodes"/>
 <xsl:text>&#xa;</xsl:text>
     </xsl:when>
     </xsl:choose>


[2/2] git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/airavata

Posted by ra...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/airavata


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d6ffefa8
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d6ffefa8
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d6ffefa8

Branch: refs/heads/master
Commit: d6ffefa815ccfc3598d2eca9f7ec5c3f461b293d
Parents: c29a6a9 7536fb6
Author: raminder <ra...@apache.org>
Authored: Thu Aug 7 16:32:44 2014 -0400
Committer: raminder <ra...@apache.org>
Committed: Thu Aug 7 16:32:44 2014 -0400

----------------------------------------------------------------------
 .../airavata/api/server/WorkflowServer.java     |   4 +-
 .../server/handler/WorkflowServerHandler.java   |  18 +-
 .../AiravataExperimentStatusUpdator.java        | 116 +++++++
 .../listener/ExperimentStatusChangedEvent.java  |  64 ++++
 .../util/AiravataExperimentStatusUpdator.java   | 117 -------
 .../airavata/api/server/util/Constants.java     |   4 +-
 .../api/server/util/DataModelUtils.java         |   3 -
 .../client/samples/CreateLaunchExperiment.java  |   4 +-
 .../org/airavata/appcatalog/cpi/AppCatalog.java |   7 +
 .../appcatalog/cpi/WorkflowCatalog.java         |  20 ++
 .../catalog/data/impl/AppCatalogImpl.java       |   5 +
 .../catalog/data/impl/WorkflowCatalogImpl.java  | 137 ++++++++
 .../catalog/data/model/Workflow.java            |  80 +++++
 .../data/resources/AbstractResource.java        |  11 +-
 .../data/resources/WorkflowResource.java        | 336 +++++++++++++++++++
 .../catalog/data/util/AppCatalogJPAUtils.java   | 101 ++----
 .../data/util/AppCatalogResourceType.java       |   3 +-
 .../data/util/AppCatalogThriftConversion.java   |   9 +
 .../src/main/resources/META-INF/persistence.xml |   1 +
 .../src/main/resources/appcatalog-derby.sql     |   9 +
 .../src/main/resources/appcatalog-mysql.sql     |   9 +
 .../src/test/resources/appcatalog-derby.sql     |   9 +
 modules/app-catalog/pom.xml                     |   1 -
 .../common/utils/AbstractActivityListener.java  |  27 --
 .../listener/AbstractActivityListener.java      |  27 ++
 .../listener/AbstractStateChangeRequest.java    |  27 ++
 .../common/utils/listener/PublisherMessage.java |  26 ++
 .../main/resources/airavata-server.properties   |   6 +-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   2 +-
 .../apache/airavata/gfac/core/cpi/GFacImpl.java |   2 +-
 .../core/monitor/AbstractActivityListener.java  |  27 --
 .../AiravataExperimentStatusUpdator.java        | 109 ------
 .../core/monitor/AiravataJobStatusUpdator.java  |   2 +-
 .../core/monitor/AiravataTaskStatusUpdator.java |   2 +-
 .../AiravataWorkflowNodeStatusUpdator.java      |   2 +-
 .../core/monitor/GfacInternalStatusUpdator.java |   2 +-
 .../state/AbstractStateChangeRequest.java       |  27 --
 .../state/ExperimentStatusChangedEvent.java     |  63 ----
 .../monitor/state/JobStatusChangeRequest.java   |   1 +
 .../monitor/state/JobStatusChangedEvent.java    |   1 +
 .../state/TaskOutputDataChangedEvent.java       |  64 ++++
 .../monitor/state/TaskStatusChangeRequest.java  |   1 +
 .../monitor/state/TaskStatusChangedEvent.java   |   1 +
 .../state/WorkflowNodeStatusChangedEvent.java   |   1 +
 .../gfac/local/provider/impl/LocalProvider.java |  22 +-
 .../main/java/generators/JPAClassGenerator.java |   2 +-
 .../src/main/java/generators/SQLGenerator.java  |   2 +-
 modules/workflow-model/workflow-engine/pom.xml  |  10 +
 .../workflow/catalog/WorkflowCatalog.java       |  53 ---
 .../catalog/WorkflowCatalogException.java       |  17 -
 .../catalog/WorkflowCatalogFactory.java         |  24 +-
 .../workflow/catalog/WorkflowCatalogImpl.java   | 167 ---------
 .../workflow/engine/WorkflowEngineImpl.java     |   2 +-
 .../airavata/workflow/engine/WorkflowUtils.java |   3 -
 .../engine/interpretor/WorkflowInterpreter.java |  57 ++--
 .../engine/util/ProxyMonitorPublisher.java      |   2 +-
 .../RegistryWorkflowPublisherWindow.java        |   2 -
 57 files changed, 1096 insertions(+), 755 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d6ffefa8/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/d6ffefa8/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/GFacImpl.java
----------------------------------------------------------------------