Posted to commits@airavata.apache.org by ch...@apache.org on 2014/11/11 21:12:05 UTC

[01/50] [abbrv] airavata git commit: adding util methods to get job submission

Repository: airavata
Updated Branches:
  refs/heads/master f7de359dc -> c36ab2419


adding util methods to get job submission


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/14bd9416
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/14bd9416
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/14bd9416

Branch: refs/heads/master
Commit: 14bd9416af4ced1702086a43f82dacc35b4b8a46
Parents: add6b58
Author: chathuriw <ka...@gmail.com>
Authored: Thu Oct 30 17:06:06 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Thu Oct 30 17:06:06 2014 -0400

----------------------------------------------------------------------
 .../org/apache/airavata/gfac/Scheduler.java     |  4 +--
 .../core/handler/AppDescriptorCheckHandler.java |  6 ++--
 .../airavata/gfac/core/utils/GFacUtils.java     | 38 ++++++++++++++++++++
 3 files changed, 43 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/14bd9416/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 8f5847f..9e642fe 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -109,7 +109,7 @@ public class Scheduler {
             if (provider == null) {
 
                 List<JobSubmissionInterface> jobSubmissionInterfaces = jobExecutionContext.getApplicationContext().getComputeResourceDescription().getJobSubmissionInterfaces();
-                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
+                String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
                 providerClassName = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
                 Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);
                 provider = aClass1.newInstance();
@@ -162,7 +162,7 @@ public class Scheduler {
             // This should be have a single element only.
 
             if (executionMode == null || "".equals(executionMode)) {
-                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
+                String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
                 executionMode = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
             }
         } catch (XPathExpressionException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/14bd9416/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
index 4627bf5..72a8f1f 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
@@ -53,7 +53,7 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
         /*
         * Stdout and Stderr for Shell
         */
-        data.append(",").append(jobExecutionContext.getStandaredOutput()).append(",").append(jobExecutionContext.getStandaredError());
+        data.append(",").append(jobExecutionContext.getStandardOutput()).append(",").append(jobExecutionContext.getStandardError());
 
 
         logger.info("Recoverable data is saving to zk: " + data.toString());
@@ -74,8 +74,8 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
             jobExecutionContext.setWorkingDir(split[1]);
             jobExecutionContext.setInputDir(split[2]);
             jobExecutionContext.setOutputDir(split[3]);
-            jobExecutionContext.setStandaredOutput(split[4]);
-            jobExecutionContext.setStandaredError(split[5]);
+            jobExecutionContext.setStandardOutput(split[4]);
+            jobExecutionContext.setStandardError(split[5]);
         } catch (Exception e) {
             throw new GFacHandlerException(e);
         }

http://git-wip-us.apache.org/repos/asf/airavata/blob/14bd9416/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index 71f8cfe..695c943 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -20,6 +20,9 @@
  */
 package org.apache.airavata.gfac.core.utils;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataZKUtils;
 import org.apache.airavata.common.utils.DBUtil;
@@ -36,6 +39,8 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
@@ -1236,4 +1241,37 @@ public class GFacUtils {
 		}
 	}
 
+    public static LOCALSubmission getLocalJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getLocalJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static UnicoreJobSubmission getUnicoreJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving UNICORE job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static UnicoreJobSubmission getJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
 }

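A minimal usage sketch of the two new helpers, assuming an app catalog backend is reachable at runtime. The submission ids below are placeholders; in GFac they come from the compute resource's JobSubmissionInterface entries rather than hard-coded strings.

    import org.airavata.appcatalog.cpi.AppCatalogException;
    import org.apache.airavata.gfac.core.utils.GFacUtils;
    import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
    import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;

    public class JobSubmissionLookupExample {
        public static void main(String[] args) throws AppCatalogException {
            // Placeholder ids for illustration only.
            String localSubmissionId = "local-submission-id";
            String unicoreSubmissionId = "unicore-submission-id";

            // Look up the LOCAL job submission registered in the app catalog.
            LOCALSubmission local = GFacUtils.getLocalJobSubmission(localSubmissionId);

            // Look up the UNICORE job submission the same way.
            UnicoreJobSubmission unicore = GFacUtils.getUnicoreJobSubmission(unicoreSubmissionId);

            System.out.println("Resolved submissions: " + local + ", " + unicore);
        }
    }
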

[13/50] [abbrv] airavata git commit: adding util methods to get job submission

Posted by ch...@apache.org.
adding util methods to get job submission


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/51361579
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/51361579
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/51361579

Branch: refs/heads/master
Commit: 51361579602458f56e9b70250c76397305941696
Parents: e28919c
Author: chathuriw <ka...@gmail.com>
Authored: Thu Oct 30 17:06:06 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:15 2014 -0500

----------------------------------------------------------------------
 .../org/apache/airavata/gfac/Scheduler.java     |  4 +--
 .../core/handler/AppDescriptorCheckHandler.java |  6 ++--
 .../airavata/gfac/core/utils/GFacUtils.java     | 38 ++++++++++++++++++++
 3 files changed, 43 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/51361579/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 8f5847f..9e642fe 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -109,7 +109,7 @@ public class Scheduler {
             if (provider == null) {
 
                 List<JobSubmissionInterface> jobSubmissionInterfaces = jobExecutionContext.getApplicationContext().getComputeResourceDescription().getJobSubmissionInterfaces();
-                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
+                String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
                 providerClassName = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
                 Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);
                 provider = aClass1.newInstance();
@@ -162,7 +162,7 @@ public class Scheduler {
             // This should be have a single element only.
 
             if (executionMode == null || "".equals(executionMode)) {
-                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
+                String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
                 executionMode = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
             }
         } catch (XPathExpressionException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/51361579/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
index 4627bf5..72a8f1f 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
@@ -53,7 +53,7 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
         /*
         * Stdout and Stderr for Shell
         */
-        data.append(",").append(jobExecutionContext.getStandaredOutput()).append(",").append(jobExecutionContext.getStandaredError());
+        data.append(",").append(jobExecutionContext.getStandardOutput()).append(",").append(jobExecutionContext.getStandardError());
 
 
         logger.info("Recoverable data is saving to zk: " + data.toString());
@@ -74,8 +74,8 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
             jobExecutionContext.setWorkingDir(split[1]);
             jobExecutionContext.setInputDir(split[2]);
             jobExecutionContext.setOutputDir(split[3]);
-            jobExecutionContext.setStandaredOutput(split[4]);
-            jobExecutionContext.setStandaredError(split[5]);
+            jobExecutionContext.setStandardOutput(split[4]);
+            jobExecutionContext.setStandardError(split[5]);
         } catch (Exception e) {
             throw new GFacHandlerException(e);
         }

http://git-wip-us.apache.org/repos/asf/airavata/blob/51361579/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index ce74e4e..ff6f2c2 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -20,6 +20,9 @@
  */
 package org.apache.airavata.gfac.core.utils;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataZKUtils;
 import org.apache.airavata.common.utils.DBUtil;
@@ -36,6 +39,8 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
@@ -1237,4 +1242,37 @@ public class GFacUtils {
 		}
 	}
 
+    public static LOCALSubmission getLocalJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getLocalJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static UnicoreJobSubmission getUnicoreJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving UNICORE job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
+    public static UnicoreJobSubmission getJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
+
 }

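As committed, getJobSubmission simply repeats the UNICORE lookup. A hedged sketch of how a caller could choose between the two helpers by protocol is shown below; it assumes the JobSubmissionProtocol enum exposes LOCAL and UNICORE constants, and the helper name and Object return type are illustrative, not part of this commit.

    import org.airavata.appcatalog.cpi.AppCatalogException;
    import org.apache.airavata.gfac.core.utils.GFacUtils;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

    public class SubmissionLookupDispatchSketch {
        // Hypothetical helper: pick the matching GFacUtils lookup for a protocol.
        public static Object resolveSubmission(JobSubmissionProtocol protocol, String submissionId)
                throws AppCatalogException {
            switch (protocol) {
                case LOCAL:      // assumed enum constant
                    return GFacUtils.getLocalJobSubmission(submissionId);
                case UNICORE:    // assumed enum constant
                    return GFacUtils.getUnicoreJobSubmission(submissionId);
                default:
                    throw new IllegalArgumentException("No lookup implemented for protocol: " + protocol);
            }
        }
    }
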

[08/50] [abbrv] airavata git commit: Integrated appCatalog model to GFac local and hpc monitor modules, commented out test classes

Posted by ch...@apache.org.
Integrated appCatalog model to GFac local and hpc monitor modules, commented out test classes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/83ecde91
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/83ecde91
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/83ecde91

Branch: refs/heads/master
Commit: 83ecde91a83778771bf9aac84f503f7b44f71844
Parents: 2dd94e6
Author: shamrath <sh...@gmail.com>
Authored: Tue Nov 4 17:51:53 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Tue Nov 4 17:51:53 2014 -0500

----------------------------------------------------------------------
 .../gfac/core/context/JobExecutionContext.java  |  12 +
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   4 +
 .../handler/LocalDirectorySetupHandler.java     |  19 +-
 .../gfac/local/provider/impl/LocalProvider.java |  48 ++-
 .../gfac/local/utils/LocalProviderUtil.java     |  15 +-
 .../gfac/services/impl/LocalProviderTest.java   | 368 +++++++++----------
 .../airavata/gfac/monitor/HPCMonitorID.java     |  11 +-
 .../airavata/gfac/monitor/HostMonitorData.java  |  38 +-
 .../handlers/GridPullMonitorHandler.java        |   2 +-
 .../monitor/impl/pull/qstat/HPCPullMonitor.java |  34 +-
 .../airavata/gfac/monitor/util/CommonUtils.java |  31 +-
 .../job/QstatMonitorTestWithMyProxyAuth.java    | 344 ++++++++---------
 12 files changed, 472 insertions(+), 454 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index d344e8b..ff764a0 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -72,6 +72,10 @@ public class JobExecutionContext extends AbstractContext implements Serializable
 
     private String credentialStoreToken;
     /**
+     * User defined scratch/temp directory
+     */
+    private String scratchLocation;
+    /**
      * User defined working directory.
      */
     private String workingDir;
@@ -359,6 +363,14 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.credentialStoreToken = credentialStoreToken;
     }
 
+    public String getScratchLocation() {
+        return scratchLocation;
+    }
+
+    public void setScratchLocation(String scratchLocation) {
+        this.scratchLocation = scratchLocation;
+    }
+
     public String getWorkingDir() {
         return workingDir;
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 0455f7e..d063dac 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -372,6 +372,10 @@ public class BetterGfacImpl implements GFac,Watcher {
     }
 
     private void setUpWorkingLocation(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, String scratchLocation) {
+        /**
+         * Scratch location
+         */
+        jobExecutionContext.setScratchLocation(scratchLocation);
 
         /**
          * Working dir

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
index de516c0..394cfaa 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
@@ -20,12 +20,9 @@
 */
 package org.apache.airavata.gfac.local.handler;
 
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,18 +34,14 @@ public class LocalDirectorySetupHandler implements GFacHandler {
 
     public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
         log.info("Invoking LocalDirectorySetupHandler ...");
-        HostDescriptionType type = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        log.debug("working directory = " + app.getStaticWorkingDirectory());
-        log.debug("temp directory = " + app.getScratchWorkingDirectory());
+        log.debug("working directory = " + jobExecutionContext.getWorkingDir());
+        log.debug("temp directory = " + jobExecutionContext.getWorkingDir());
 
-        makeFileSystemDir(app.getStaticWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getScratchWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getInputDataDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getOutputDataDirectory(),jobExecutionContext);
+        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+        makeFileSystemDir(jobExecutionContext.getInputDir());
+        makeFileSystemDir(jobExecutionContext.getOutputDir());
     }
-    private void makeFileSystemDir(String dir, JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+    private void makeFileSystemDir(String dir) throws GFacHandlerException {
            File f = new File(dir);
            if (f.isDirectory() && f.exists()) {
                return;

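The remainder of makeFileSystemDir is not shown in this hunk. A minimal standalone sketch of that kind of directory-setup step follows; it creates missing directories and throws a plain RuntimeException instead of the handler's GFacHandlerException, and the paths are illustrative rather than taken from the JobExecutionContext.

    import java.io.File;

    public class DirectorySetupSketch {
        // Create the directory if it does not already exist.
        static void makeFileSystemDir(String dir) {
            File f = new File(dir);
            if (f.exists() && f.isDirectory()) {
                return; // already present, nothing to do
            }
            if (!f.mkdirs()) {
                throw new RuntimeException("Cannot create directory " + dir);
            }
        }

        public static void main(String[] args) {
            String tmp = System.getProperty("java.io.tmpdir");
            makeFileSystemDir(tmp + File.separator + "input");
            makeFileSystemDir(tmp + File.separator + "output");
        }
    }
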
http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
index 51da68a..4cdd0c0 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
@@ -37,6 +37,8 @@ import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.core.utils.OutputUtils;
 import org.apache.airavata.gfac.local.utils.InputStreamToFileWriter;
 import org.apache.airavata.gfac.local.utils.InputUtils;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
@@ -104,18 +106,16 @@ public class LocalProvider extends AbstractProvider {
 
     public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
     	super.initialize(jobExecutionContext);
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
 
-        buildCommand(app.getExecutableLocation(), ProviderUtils.getInputParameters(jobExecutionContext));
-        initProcessBuilder(app);
+        buildCommand(jobExecutionContext.getExecutablePath(), ProviderUtils.getInputParameters(jobExecutionContext));
+        initProcessBuilder(jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription());
 
         // extra environment variables
-        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
-        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
+        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getInputDir());
+        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getOutputDir());
 
         // set working directory
-        builder.directory(new File(app.getStaticWorkingDirectory()));
+        builder.directory(new File(jobExecutionContext.getWorkingDir()));
 
         // log info
         log.info("Command = " + InputUtils.buildCommand(cmdList));
@@ -127,21 +127,19 @@ public class LocalProvider extends AbstractProvider {
 
     public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-         ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                 getApplicationContext().getApplicationDeploymentDescription().getType();
         JobDetails jobDetails = new JobDetails();
         try {
         	jobId = jobExecutionContext.getTaskData().getTaskID();
             jobDetails.setJobID(jobId);
-            jobDetails.setJobDescription(app.toString());
+            jobDetails.setJobDescription(jobExecutionContext.getApplicationContext()
+                    .getApplicationDeploymentDescription().getAppDeploymentDescription());
             jobExecutionContext.setJobDetails(jobDetails);
-            jobDetails.setJobDescription(app.toString());
             GFacUtils.saveJobStatus(jobExecutionContext,jobDetails, JobState.SETUP);
         	// running cmd
             Process process = builder.start();
 
-            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), app.getStandardOutput());
-            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), app.getStandardError());
+            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), jobExecutionContext.getStandardOutput());
+            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), jobExecutionContext.getStandardError());
 
             // start output threads
             standardOutWriter.setDaemon(true);
@@ -167,9 +165,10 @@ public class LocalProvider extends AbstractProvider {
 
             StringBuffer buf = new StringBuffer();
             buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
-                    .append(" on the localHost, working directory = ").append(app.getStaticWorkingDirectory())
-                    .append(" tempDirectory = ").append(app.getScratchWorkingDirectory()).append(" With the status ")
+                    .append(" on the localHost, working directory = ").append(jobExecutionContext.getWorkingDir())
+                    .append(" tempDirectory = ").append(jobExecutionContext.getScratchLocation()).append(" With the status ")
                     .append(String.valueOf(returnValue));
+
             log.info(buf.toString());
 
             // updating the job status to complete because there's nothing to monitor in local jobs
@@ -219,12 +218,10 @@ public class LocalProvider extends AbstractProvider {
 //	}
 
     public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-
         try {
         	List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
-            String stdOutStr = GFacUtils.readFileToString(app.getStandardOutput());
-            String stdErrStr = GFacUtils.readFileToString(app.getStandardError());
+            String stdOutStr = GFacUtils.readFileToString(jobExecutionContext.getStandardOutput());
+            String stdErrStr = GFacUtils.readFileToString(jobExecutionContext.getStandardError());
 			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
             TaskDetails taskDetails = (TaskDetails)registry.get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
@@ -257,15 +254,14 @@ public class LocalProvider extends AbstractProvider {
         cmdList.addAll(inputParameterList);
     }
 
-    private void initProcessBuilder(ApplicationDeploymentDescriptionType app){
+    private void initProcessBuilder(ApplicationDeploymentDescription app){
         builder = new ProcessBuilder(cmdList);
 
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-
-        if(env != null && env.length > 0){
-            Map<String,String> builderEnv = builder.environment();
-            for (NameValuePairType entry : env) {
-                builderEnv.put(entry.getName(), entry.getValue());
+        List<SetEnvPaths> setEnvironment = app.getSetEnvironment();
+        if (setEnvironment != null) {
+            for (SetEnvPaths envPath : setEnvironment) {
+                Map<String,String> builderEnv = builder.environment();
+                builderEnv.put(envPath.getName(), envPath.getValue());
             }
         }
     }

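For reference, a minimal standalone sketch of the ProcessBuilder pattern the provider now relies on: a command list, extra environment entries, and an explicit working directory. The command, environment names, and paths are illustrative; the real provider takes them from the JobExecutionContext and the deployment's SetEnvPaths entries.

    import java.io.File;
    import java.io.IOException;
    import java.util.Arrays;
    import java.util.Map;

    public class ProcessEnvSketch {
        public static void main(String[] args) throws IOException, InterruptedException {
            // Command list: executable followed by its arguments.
            ProcessBuilder builder = new ProcessBuilder(Arrays.asList("/bin/echo", "hello"));

            // Copy name/value pairs into the child environment, mirroring initProcessBuilder.
            Map<String, String> env = builder.environment();
            env.put("INPUT_DATA_DIR", "/tmp/input");   // illustrative values only
            env.put("OUTPUT_DATA_DIR", "/tmp/output");

            // Run from a chosen working directory, as builder.directory(...) does in the provider.
            builder.directory(new File(System.getProperty("java.io.tmpdir")));
            Process process = builder.start();
            System.out.println("exit code = " + process.waitFor());
        }
    }
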
http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
index 932c693..2b45df7 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
@@ -22,7 +22,6 @@ package org.apache.airavata.gfac.local.utils;
 
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,14 +40,12 @@ public class LocalProviderUtil {
     }
 
     public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                getApplicationContext().getApplicationDeploymentDescription().getType();
-        log.info("working diectroy = " + app.getStaticWorkingDirectory());
-        log.info("temp directory = " + app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getStaticWorkingDirectory());
-        makeFileSystemDir(app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getInputDataDirectory());
-        makeFileSystemDir(app.getOutputDataDirectory());
+        log.info("working diectroy = " + jobExecutionContext.getWorkingDir());
+        log.info("temp directory = " + jobExecutionContext.getScratchLocation());
+        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+        makeFileSystemDir(jobExecutionContext.getScratchLocation());
+        makeFileSystemDir(jobExecutionContext.getInputDir());
+        makeFileSystemDir(jobExecutionContext.getOutputDir());
     }
 
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
index 343b4bf..aeb8158 100644
--- a/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
+++ b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
@@ -1,184 +1,184 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.local.handler.LocalDirectorySetupHandler;
-import org.apache.airavata.gfac.local.provider.impl.LocalProvider;
-import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
-import org.apache.airavata.model.workspace.experiment.Experiment;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.LoggingRegistryImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.google.common.eventbus.EventBus;
-
-public class LocalProviderTest {
-    private JobExecutionContext jobExecutionContext;
-    @BeforeTest
-    public void setUp() throws Exception {
-
-        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        File configFile = new File(resource.getPath());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null);
-        //have to set InFlwo Handlers and outFlowHandlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription();
-        host.getType().setHostName("localhost");
-        host.getType().setHostAddress("localhost");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-           * Default tmp location
-           */
-        String tempDir = System.getProperty("java.io.tmpdir");
-        if (tempDir == null) {
-            tempDir = "/tmp";
-        }
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-        jobExecutionContext.setExperimentID("test123");
-        jobExecutionContext.setExperiment(new Experiment("test123","project1","admin","testExp"));
-        jobExecutionContext.setTaskData(new TaskDetails(jobExecutionContext.getExperimentID()));
-        jobExecutionContext.setRegistry(new LoggingRegistryImpl());
-        jobExecutionContext.setWorkflowNodeDetails(new WorkflowNodeDetails(jobExecutionContext.getExperimentID(),"none", ExecutionUnit.APPLICATION));
-
-
-    }
-
-    @Test
-    public void testLocalDirectorySetupHandler() throws GFacException {
-        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
-        localDirectorySetupHandler.invoke(jobExecutionContext);
-
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        junit.framework.Assert.assertTrue(new File(app.getStaticWorkingDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getScratchWorkingDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getInputDataDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getOutputDataDirectory()).exists());
-    }
-
-    @Test
-    public void testLocalProvider() throws GFacException,GFacProviderException {
-        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
-        localDirectorySetupHandler.invoke(jobExecutionContext);
-        LocalProvider localProvider = new LocalProvider();
-        localProvider.setMonitorPublisher(new MonitorPublisher(new EventBus()));
-        localProvider.initialize(jobExecutionContext);
-        localProvider.execute(jobExecutionContext);
-        localProvider.dispose(jobExecutionContext);
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+//import org.apache.airavata.common.utils.MonitorPublisher;
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.local.handler.LocalDirectorySetupHandler;
+//import org.apache.airavata.gfac.local.provider.impl.LocalProvider;
+//import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
+//import org.apache.airavata.model.workspace.experiment.Experiment;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.LoggingRegistryImpl;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.apache.commons.lang.SystemUtils;
+//import org.testng.annotations.BeforeTest;
+//import org.testng.annotations.Test;
+//
+//import com.google.common.eventbus.EventBus;
+//
+//public class LocalProviderTest {
+//    private JobExecutionContext jobExecutionContext;
+//    @BeforeTest
+//    public void setUp() throws Exception {
+//
+//        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        File configFile = new File(resource.getPath());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null);
+//        //have to set InFlwo Handlers and outFlowHandlers
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        HostDescription host = new HostDescription();
+//        host.getType().setHostName("localhost");
+//        host.getType().setHostAddress("localhost");
+//        applicationContext.setHostDescription(host);
+//        /*
+//           * App
+//           */
+//        ApplicationDescription appDesc = new ApplicationDescription();
+//        ApplicationDeploymentDescriptionType app = appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//
+//        /*
+//           * Use bat file if it is compiled on Windows
+//           */
+//        if (SystemUtils.IS_OS_WINDOWS) {
+//            URL url = this.getClass().getClassLoader().getResource("echo.bat");
+//            app.setExecutableLocation(url.getFile());
+//        } else {
+//            //for unix and Mac
+//            app.setExecutableLocation("/bin/echo");
+//        }
+//
+//        /*
+//           * Default tmp location
+//           */
+//        String tempDir = System.getProperty("java.io.tmpdir");
+//        if (tempDir == null) {
+//            tempDir = "/tmp";
+//        }
+//
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "input");
+//        app.setOutputDataDirectory(tempDir + File.separator + "output");
+//        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
+//        app.setStandardError(tempDir + File.separator + "echo.stderr");
+//
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//
+//        /*
+//           * Service
+//           */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//                .size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        /*
+//        * Host
+//        */
+//        applicationContext.setServiceDescription(serv);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+//        outMessage.addParameter("echo_output", echo_out);
+//
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//        jobExecutionContext.setExperimentID("test123");
+//        jobExecutionContext.setExperiment(new Experiment("test123","project1","admin","testExp"));
+//        jobExecutionContext.setTaskData(new TaskDetails(jobExecutionContext.getExperimentID()));
+//        jobExecutionContext.setRegistry(new LoggingRegistryImpl());
+//        jobExecutionContext.setWorkflowNodeDetails(new WorkflowNodeDetails(jobExecutionContext.getExperimentID(),"none", ExecutionUnit.APPLICATION));
+//
+//
+//    }
+//
+//    @Test
+//    public void testLocalDirectorySetupHandler() throws GFacException {
+//        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
+//        localDirectorySetupHandler.invoke(jobExecutionContext);
+//
+//        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+//        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
+//        junit.framework.Assert.assertTrue(new File(app.getStaticWorkingDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getScratchWorkingDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getInputDataDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getOutputDataDirectory()).exists());
+//    }
+//
+//    @Test
+//    public void testLocalProvider() throws GFacException,GFacProviderException {
+//        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
+//        localDirectorySetupHandler.invoke(jobExecutionContext);
+//        LocalProvider localProvider = new LocalProvider();
+//        localProvider.setMonitorPublisher(new MonitorPublisher(new EventBus()));
+//        localProvider.initialize(jobExecutionContext);
+//        localProvider.execute(jobExecutionContext);
+//        localProvider.dispose(jobExecutionContext);
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
index a4a131d..c788ace 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
@@ -31,6 +31,7 @@ import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -45,10 +46,10 @@ public class HPCMonitorID extends MonitorID {
 
     private AuthenticationInfo authenticationInfo = null;
 
-    public HPCMonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID,
+    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID,
                         String experimentID, String userName,String jobName) {
-        super(host, jobID, taskID, workflowNodeID, experimentID, userName,jobName);
-        setHost(host);
+        super(computeResourceDescription, jobID, taskID, workflowNodeID, experimentID, userName,jobName);
+        setComputeResourceDescription(computeResourceDescription);
         setJobStartedTime(new Timestamp((new Date()).getTime()));
         setUserName(userName);
         setJobID(jobID);
@@ -84,8 +85,8 @@ public class HPCMonitorID extends MonitorID {
         }
     }
 
-    public HPCMonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID, String experimentID, String userName, AuthenticationInfo authenticationInfo) {
-        setHost(host);
+    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID, String experimentID, String userName, AuthenticationInfo authenticationInfo) {
+        setComputeResourceDescription(computeResourceDescription);
         setJobStartedTime(new Timestamp((new Date()).getTime()));
         this.authenticationInfo = authenticationInfo;
         // if we give myproxyauthenticationInfo, so we try to use myproxy user as the user

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
index 0480925..c2017a0 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
@@ -20,34 +20,36 @@
 */
 package org.apache.airavata.gfac.monitor;
 
-import org.apache.airavata.commons.gfac.type.HostDescription;
+import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 
-import java.util.ArrayList;
 import java.util.List;
 
 public class HostMonitorData {
-    private HostDescription host;
+//    private HostDescription host;
+    private ComputeResourceDescription computeResourceDescription;
+    private JobSubmissionProtocol jobSubmissionProtocol;
+    private DataMovementProtocol dataMovementProtocol;
 
     private List<MonitorID> monitorIDs;
 
-    public HostMonitorData(HostDescription host) {
-        this.host = host;
-        monitorIDs = new ArrayList<MonitorID>();
-    }
+    public HostMonitorData(JobExecutionContext jobExecutionContext) {
+        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
+        this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
 
-    public HostMonitorData(HostDescription host, List<MonitorID> monitorIDs) {
-        this.host = host;
-        this.monitorIDs = monitorIDs;
     }
 
-    public HostDescription getHost() {
-        return host;
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
     }
 
-    public void setHost(HostDescription host) {
-        this.host = host;
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
     }
 
     public List<MonitorID> getMonitorIDs() {
@@ -67,4 +69,12 @@ public class HostMonitorData {
     public void addMonitorIDForHost(MonitorID monitorID)throws AiravataMonitorException {
         monitorIDs.add(monitorID);
     }
+
+    public JobSubmissionProtocol getJobSubmissionProtocol() {
+        return jobSubmissionProtocol;
+    }
+
+    public DataMovementProtocol getDataMovementProtocol() {
+        return dataMovementProtocol;
+    }
 }
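
HostMonitorData is now populated from the JobExecutionContext, so the pull monitor can group jobs by compute resource and pick the monitoring path from the preferred protocols. A minimal sketch of the intended usage, mirroring CommonUtils.addMonitortoQueue further below (note that the new constructor no longer re-initializes the monitorIDs list, so it is assumed to be set before addMonitorIDForHost is called):

    // Sketch, assuming jobExecutionContext and monitorID already exist.
    HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
    hostMonitorData.addMonitorIDForHost(monitorID); // throws AiravataMonitorException
    if (hostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
        String hostName = hostMonitorData.getComputeResourceDescription().getHostName();
        // ... open or reuse an SSH ResourceConnection keyed by hostName
    }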

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
index ceb440c..3a0e44d 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
@@ -99,7 +99,7 @@ public class GridPullMonitorHandler extends ThreadedHandler implements Watcher{
             } catch (InterruptedException e) {
                 e.printStackTrace();
             }
-            CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID);
+            CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID, jobExecutionContext);
             CommonUtils.increaseZkJobCount(monitorID); // update change job count to zookeeper
         } catch (AiravataMonitorException e) {
             logger.errorId(monitorID.getJobID(), "Error adding job {} monitorID object to the queue with experiment {}",

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index d3c3df8..97825a4 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@ -38,6 +38,7 @@ import org.apache.airavata.gfac.monitor.impl.push.amqp.SimpleJobFinishConsumer;
 import org.apache.airavata.gfac.monitor.util.CommonUtils;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -160,21 +161,19 @@ public class HPCPullMonitor extends PullMonitor {
             Map<String,MonitorID> completedJobs = new HashMap<String,MonitorID>();
             List<HostMonitorData> hostMonitorData = take.getHostMonitorData();
             for (HostMonitorData iHostMonitorData : hostMonitorData) {
-                if (iHostMonitorData.getHost().getType() instanceof GsisshHostType
-                        || iHostMonitorData.getHost().getType() instanceof SSHHostType) {
-                    currentHostDescription = iHostMonitorData.getHost();
-                    String hostName =  iHostMonitorData.getHost().getType().getHostAddress();
+                if (iHostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
+                    String hostName = iHostMonitorData.getComputeResourceDescription().getHostName();
                     ResourceConnection connection = null;
                     if (connections.containsKey(hostName)) {
-                        if(!connections.get(hostName).isConnected()){
-                            connection = new ResourceConnection(iHostMonitorData,getAuthenticationInfo());
+                        if (!connections.get(hostName).isConnected()) {
+                            connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
                             connections.put(hostName, connection);
-                        }else{
+                        } else {
                             logger.debug("We already have this connection so not going to create one");
                             connection = connections.get(hostName);
                         }
                     } else {
-                        connection = new ResourceConnection(iHostMonitorData,getAuthenticationInfo());
+                        connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
                         connections.put(hostName, connection);
                     }
 
@@ -182,8 +181,8 @@ public class HPCPullMonitor extends PullMonitor {
                     List<MonitorID> monitorID = iHostMonitorData.getMonitorIDs();
                     Iterator<String> iterator1 = cancelJobList.iterator();
 
-                    for(MonitorID iMonitorID:monitorID){
-                        while(iterator1.hasNext()) {
+                    for (MonitorID iMonitorID : monitorID) {
+                        while (iterator1.hasNext()) {
                             String cancelMId = iterator1.next();
                             if (cancelMId.equals(iMonitorID.getExperimentID() + "+" + iMonitorID.getTaskID())) {
                                 iMonitorID.setStatus(JobState.CANCELED);
@@ -202,7 +201,7 @@ public class HPCPullMonitor extends PullMonitor {
                         for (MonitorID iMonitorID : monitorID) {
                             String completeId = null;
                             while (iterator.hasNext()) {
-                                 completeId = iterator.next();
+                                completeId = iterator.next();
                                 if (completeId.equals(iMonitorID.getUserName() + "," + iMonitorID.getJobName())) {
                                     logger.info("This job is finished because push notification came with <username,jobName> " + completeId);
                                     completedJobs.put(iMonitorID.getJobName(), iMonitorID);
@@ -222,21 +221,20 @@ public class HPCPullMonitor extends PullMonitor {
                     while (iterator.hasNext()) {
                         MonitorID iMonitorID = iterator.next();
                         currentMonitorID = iMonitorID;
-                        if (!JobState.CANCELED.equals(iMonitorID.getStatus())&&
+                        if (!JobState.CANCELED.equals(iMonitorID.getStatus()) &&
                                 !JobState.COMPLETE.equals(iMonitorID.getStatus())) {
                             iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is NOT a simple setter we have a logic
-                        }else if(JobState.COMPLETE.equals(iMonitorID.getStatus())){
+                        } else if (JobState.COMPLETE.equals(iMonitorID.getStatus())) {
                             completedJobs.put(iMonitorID.getJobName(), iMonitorID);
                             logger.debugId(iMonitorID.getJobID(), "Moved job {} to completed jobs map, experiment {}, " +
                                     "task {}", iMonitorID.getJobID(), iMonitorID.getExperimentID(), iMonitorID.getTaskID());
                         }
                         jobStatus = new JobStatusChangeRequestEvent();
-                        iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID()+","+iMonitorID.getJobName()));    //IMPORTANT this is not a simple setter we have a logic
+                        iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is not a simple setter we have a logic
 
                         if (iMonitorID.getFailedCount() > FAILED_COUNT) {
                             iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                            String outputDir = iMonitorID.getJobExecutionContext().getApplicationContext()
-                                    .getApplicationDeploymentDescription().getType().getOutputDataDirectory();
+                            String outputDir = iMonitorID.getJobExecutionContext().getOutputDir();
                             List<String> stdOut = null;
                             try {
                                 stdOut = connection.getCluster().listDirectory(outputDir); // check the outputs directory
@@ -280,8 +278,8 @@ public class HPCPullMonitor extends PullMonitor {
                         iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
                     }
                 } else {
-                    logger.debug("Qstat Monitor doesn't handle non-gsissh hosts , host {}", iHostMonitorData.getHost()
-                            .getType().getHostAddress());
+                    logger.debug("Qstat Monitor doesn't handle non-gsissh hosts , host {}", iHostMonitorData.
+                            getComputeResourceDescription().getHostName());
                 }
             }
             // We have finished all the HostMonitorData object in userMonitorData, now we need to put it back
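
The monitor now dispatches on JobSubmissionProtocol.SSH and keeps one ResourceConnection per host name, reusing it across polling rounds. Stripped of diff noise, the caching logic above amounts to the following (connections is assumed to be the monitor's Map<String, ResourceConnection> field):

    // Compressed restatement of the per-host connection reuse shown above.
    ResourceConnection connection = connections.get(hostName);
    if (connection == null || !connection.isConnected()) {
        connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
        connections.put(hostName, connection);
    }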

http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
index 3abcf1d..a503154 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
@@ -34,6 +34,7 @@ import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.HostMonitorData;
 import org.apache.airavata.gfac.monitor.UserMonitorData;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
@@ -79,11 +80,11 @@ public class CommonUtils {
         }
     }
     public static String getChannelID(MonitorID monitorID) {
-        return monitorID.getUserName() + "-" + monitorID.getHost().getType().getHostName();
+        return monitorID.getUserName() + "-" + monitorID.getComputeResourceDescription().getHostName();
     }
 
     public static String getRoutingKey(MonitorID monitorID) {
-        return "*." + monitorID.getUserName() + "." + monitorID.getHost().getType().getHostAddress();
+        return "*." + monitorID.getUserName() + "." + monitorID.getComputeResourceDescription().getIpAddresses().get(0);
     }
 
     public static String getChannelID(String userName,String hostAddress) {
@@ -94,7 +95,7 @@ public class CommonUtils {
         return "*." + userName + "." + hostAddress;
     }
 
-    public static void addMonitortoQueue(BlockingQueue<UserMonitorData> queue, MonitorID monitorID) throws AiravataMonitorException {
+    public static void addMonitortoQueue(BlockingQueue<UserMonitorData> queue, MonitorID monitorID, JobExecutionContext jobExecutionContext) throws AiravataMonitorException {
         synchronized (queue) {
             Iterator<UserMonitorData> iterator = queue.iterator();
             while (iterator.hasNext()) {
@@ -103,7 +104,7 @@ public class CommonUtils {
                     // then this is the right place to update
                     List<HostMonitorData> monitorIDs = next.getHostMonitorData();
                     for (HostMonitorData host : monitorIDs) {
-                        if (host.getHost().toXML().equals(monitorID.getHost().toXML())) {
+                        if (isEqual(host.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
                             // ok we found right place to add this monitorID
                             host.addMonitorIDForHost(monitorID);
                             logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
@@ -113,7 +114,7 @@ public class CommonUtils {
                     }
                     // there is a userMonitor object for this user name but no Hosts for this host
                     // so we have to create new Hosts
-                    HostMonitorData hostMonitorData = new HostMonitorData(monitorID.getHost());
+                    HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
                     hostMonitorData.addMonitorIDForHost(monitorID);
                     next.addHostMonitorData(hostMonitorData);
                     logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
@@ -121,7 +122,7 @@ public class CommonUtils {
                     return;
                 }
             }
-            HostMonitorData hostMonitorData = new HostMonitorData(monitorID.getHost());
+            HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
             hostMonitorData.addMonitorIDForHost(monitorID);
 
             UserMonitorData userMonitorData = new UserMonitorData(monitorID.getUserName());
@@ -135,11 +136,18 @@ public class CommonUtils {
             }
         }
     }
+
+    private static boolean isEqual(ComputeResourceDescription comRes_1, ComputeResourceDescription comRes_2) {
+        return comRes_1.getComputeResourceId().equals(comRes_2.getComputeResourceId()) &&
+                comRes_1.getHostName().equals(comRes_2.getHostName());
+    }
+
     public static boolean isTheLastJobInQueue(BlockingQueue<MonitorID> queue,MonitorID monitorID){
         Iterator<MonitorID> iterator = queue.iterator();
         while(iterator.hasNext()){
             MonitorID next = iterator.next();
-            if(monitorID.getUserName().equals(next.getUserName()) && CommonUtils.isEqual(monitorID.getHost(), next.getHost())){
+            if (monitorID.getUserName().equals(next.getUserName()) &&
+                    CommonUtils.isEqual(monitorID.getComputeResourceDescription(), next.getComputeResourceDescription())) {
                 return false;
             }
         }
@@ -162,7 +170,7 @@ public class CommonUtils {
                     Iterator<HostMonitorData> iterator1 = hostMonitorData.iterator();
                     while (iterator1.hasNext()) {
                         HostMonitorData iHostMonitorID = iterator1.next();
-                        if (iHostMonitorID.getHost().toXML().equals(monitorID.getHost().toXML())) {
+                        if (isEqual(iHostMonitorID.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
                             Iterator<MonitorID> iterator2 = iHostMonitorID.getMonitorIDs().iterator();
                             while (iterator2.hasNext()) {
                                 MonitorID iMonitorID = iterator2.next();
@@ -172,11 +180,10 @@ public class CommonUtils {
                                     // could be different, thats why we check the jobID
                                     iterator2.remove();
                                     logger.infoId(monitorID.getJobID(), "Removed the jobId: {} JobName: {} from monitoring last " +
-                                            "status:{}", monitorID.getJobID(),monitorID.getJobName(), monitorID.getStatus().toString());
+                                            "status:{}", monitorID.getJobID(), monitorID.getJobName(), monitorID.getStatus().toString());
                                     if (iHostMonitorID.getMonitorIDs().size() == 0) {
                                         iterator1.remove();
-                                        logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getHost()
-                                                .getType().getHostAddress());
+                                        logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getComputeResourceDescription().getHostName());
                                         if (hostMonitorData.size() == 0) {
                                             // no useful data so we have to remove the element from the queue
                                             queue.remove(next);
@@ -330,7 +337,7 @@ public class CommonUtils {
      */
     public static String getJobCountUpdatePath(MonitorID monitorID){
         return new StringBuilder("/").append(Constants.STAT).append("/").append(monitorID.getUserName())
-                .append("/").append(monitorID.getHost().getType().getHostAddress()).append("/").append(Constants.JOB).toString();
+                .append("/").append(monitorID.getComputeResourceDescription().getHostName()).append("/").append(Constants.JOB).toString();
     }
 
     /**

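Hosts are now matched with isEqual on the compute resource id and host name instead of comparing serialized HostDescription XML, and callers pass the JobExecutionContext so a HostMonitorData entry can be created on demand. A minimal sketch of the updated call site, following GridPullMonitorHandler above (queue, monitorID and jobExecutionContext are assumed to exist; error handling is abbreviated):

    try {
        // New signature: the JobExecutionContext supplies the compute resource
        // and preferred protocols when a new HostMonitorData has to be created.
        CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID, jobExecutionContext);
        CommonUtils.increaseZkJobCount(monitorID); // keep the ZooKeeper job count in sync
    } catch (AiravataMonitorException e) {
        e.printStackTrace(); // the handler logs this through logger.errorId(...)
    }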
http://git-wip-us.apache.org/repos/asf/airavata/blob/83ecde91/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
index 537d8bb..610934e 100644
--- a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
@@ -1,172 +1,172 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.job;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.HPCMonitorID;
-import org.apache.airavata.gfac.monitor.UserMonitorData;
-import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.junit.Assert;
-import org.testng.annotations.Test;
-
-import com.google.common.eventbus.EventBus;
-import com.google.common.eventbus.Subscribe;
-
-public class QstatMonitorTestWithMyProxyAuth {
-    private String myProxyUserName;
-    private String myProxyPassword;
-    private String certificateLocation;
-    private String pbsFilePath;
-    private String workingDirectory;
-    private HostDescription hostDescription;
-    private MonitorPublisher monitorPublisher;
-    private BlockingQueue<UserMonitorData> pullQueue;
-    private Thread monitorThread;
-
-    @org.testng.annotations.BeforeClass
-    public void setUp() throws Exception {
-//        System.setProperty("myproxy.username", "ogce");
-//        System.setProperty("myproxy.password", "");
-//        System.setProperty("basedir", "/Users/lahirugunathilake/work/airavata/sandbox/gsissh");
-//        System.setProperty("gsi.working.directory", "/home/ogce");
-//        System.setProperty("trusted.cert.location", "/Users/lahirugunathilake/Downloads/certificates");
-        myProxyUserName = System.getProperty("myproxy.username");
-        myProxyPassword = System.getProperty("myproxy.password");
-        workingDirectory = System.getProperty("gsi.working.directory");
-        certificateLocation = System.getProperty("trusted.cert.location");
-        if (myProxyUserName == null || myProxyPassword == null || workingDirectory == null) {
-            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
-                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
-            throw new Exception("Need my proxy user name password to run tests.");
-        }
-
-        monitorPublisher =  new MonitorPublisher(new EventBus());
-        class InnerClassQstat {
-
-            @Subscribe
-            private void getStatus(JobStatusChangeEvent status) {
-                Assert.assertNotNull(status);
-                System.out.println(status.getState().toString());
-                monitorThread.interrupt();
-            }
-        }
-        monitorPublisher.registerListener(this);
-        pullQueue = new LinkedBlockingQueue<UserMonitorData>();
-        final HPCPullMonitor qstatMonitor = new
-                HPCPullMonitor(pullQueue, monitorPublisher);
-        try {
-            (new Thread(){
-                public void run(){
-                    qstatMonitor.run();
-                }
-            }).start();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        hostDescription = new HostDescription(GsisshHostType.type);
-        hostDescription.getType().setHostAddress("trestles.sdsc.edu");
-        hostDescription.getType().setHostName("gsissh-gordon");
-        ((GsisshHostType) hostDescription.getType()).setPort(22);
-        ((GsisshHostType)hostDescription.getType()).setInstalledPath("/opt/torque/bin/");
-    }
-
-    @Test
-    public void testQstatMonitor() throws SSHApiException {
-        /* now have to submit a job to some machine and add that job to the queue */
-        //Create authentication
-        GSIAuthenticationInfo authenticationInfo
-                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
-                7512, 17280000, certificateLocation);
-
-        // Server info
-        ServerInfo serverInfo = new ServerInfo("ogce", hostDescription.getType().getHostAddress());
-
-
-        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
-
-
-        // Execute command
-        System.out.println("Target PBS file path: " + workingDirectory);
-        // constructing the job object
-        JobDescriptor jobDescriptor = new JobDescriptor();
-        jobDescriptor.setWorkingDirectory(workingDirectory);
-        jobDescriptor.setShellName("/bin/bash");
-        jobDescriptor.setJobName("GSI_SSH_SLEEP_JOB");
-        jobDescriptor.setExecutablePath("/bin/echo");
-        jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setMailOptions("n");
-        jobDescriptor.setStandardOutFile(workingDirectory + File.separator + "application.out");
-        jobDescriptor.setStandardErrorFile(workingDirectory + File.separator + "application.err");
-        jobDescriptor.setNodes(1);
-        jobDescriptor.setProcessesPerNode(1);
-        jobDescriptor.setQueueName("normal");
-        jobDescriptor.setMaxWallTime("60");
-        jobDescriptor.setAcountString("sds128");
-        List<String> inputs = new ArrayList<String>();
-        jobDescriptor.setOwner("ogce");
-        inputs.add("Hello World");
-        jobDescriptor.setInputValues(inputs);
-        //finished construction of job object
-        System.out.println(jobDescriptor.toXML());
-        for (int i = 0; i < 1; i++) {
-            String jobID = pbsCluster.submitBatchJob(jobDescriptor);
-            System.out.println("Job submitted successfully, Job ID: " +  jobID);
-            MonitorID monitorID = new HPCMonitorID(hostDescription, jobID,null,null,null, "ogce","");
-            ((HPCMonitorID)monitorID).setAuthenticationInfo(authenticationInfo);
-            try {
-                org.apache.airavata.gfac.monitor.util.CommonUtils.addMonitortoQueue(pullQueue, monitorID);
-            } catch (Exception e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-            }
-        }
-        try {
-
-            monitorThread.join();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Subscribe
-    public void testCaseShutDown(JobStatusChangeEvent status) {
-        Assert.assertNotNull(status.getState());
-        monitorThread.stop();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.job;
+//
+//import java.io.File;
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.concurrent.BlockingQueue;
+//import java.util.concurrent.LinkedBlockingQueue;
+//
+//import org.apache.airavata.common.utils.MonitorPublisher;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.gfac.core.monitor.MonitorID;
+//import org.apache.airavata.gfac.monitor.HPCMonitorID;
+//import org.apache.airavata.gfac.monitor.UserMonitorData;
+//import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.junit.Assert;
+//import org.testng.annotations.Test;
+//
+//import com.google.common.eventbus.EventBus;
+//import com.google.common.eventbus.Subscribe;
+//
+//public class QstatMonitorTestWithMyProxyAuth {
+//    private String myProxyUserName;
+//    private String myProxyPassword;
+//    private String certificateLocation;
+//    private String pbsFilePath;
+//    private String workingDirectory;
+//    private HostDescription hostDescription;
+//    private MonitorPublisher monitorPublisher;
+//    private BlockingQueue<UserMonitorData> pullQueue;
+//    private Thread monitorThread;
+//
+//    @org.testng.annotations.BeforeClass
+//    public void setUp() throws Exception {
+////        System.setProperty("myproxy.username", "ogce");
+////        System.setProperty("myproxy.password", "");
+////        System.setProperty("basedir", "/Users/lahirugunathilake/work/airavata/sandbox/gsissh");
+////        System.setProperty("gsi.working.directory", "/home/ogce");
+////        System.setProperty("trusted.cert.location", "/Users/lahirugunathilake/Downloads/certificates");
+//        myProxyUserName = System.getProperty("myproxy.username");
+//        myProxyPassword = System.getProperty("myproxy.password");
+//        workingDirectory = System.getProperty("gsi.working.directory");
+//        certificateLocation = System.getProperty("trusted.cert.location");
+//        if (myProxyUserName == null || myProxyPassword == null || workingDirectory == null) {
+//            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
+//                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
+//            throw new Exception("Need my proxy user name password to run tests.");
+//        }
+//
+//        monitorPublisher =  new MonitorPublisher(new EventBus());
+//        class InnerClassQstat {
+//
+//            @Subscribe
+//            private void getStatus(JobStatusChangeEvent status) {
+//                Assert.assertNotNull(status);
+//                System.out.println(status.getState().toString());
+//                monitorThread.interrupt();
+//            }
+//        }
+//        monitorPublisher.registerListener(this);
+//        pullQueue = new LinkedBlockingQueue<UserMonitorData>();
+//        final HPCPullMonitor qstatMonitor = new
+//                HPCPullMonitor(pullQueue, monitorPublisher);
+//        try {
+//            (new Thread(){
+//                public void run(){
+//                    qstatMonitor.run();
+//                }
+//            }).start();
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//
+//        hostDescription = new HostDescription(GsisshHostType.type);
+//        hostDescription.getType().setHostAddress("trestles.sdsc.edu");
+//        hostDescription.getType().setHostName("gsissh-gordon");
+//        ((GsisshHostType) hostDescription.getType()).setPort(22);
+//        ((GsisshHostType)hostDescription.getType()).setInstalledPath("/opt/torque/bin/");
+//    }
+//
+//    @Test
+//    public void testQstatMonitor() throws SSHApiException {
+//        /* now have to submit a job to some machine and add that job to the queue */
+//        //Create authentication
+//        GSIAuthenticationInfo authenticationInfo
+//                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
+//                7512, 17280000, certificateLocation);
+//
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo("ogce", hostDescription.getType().getHostAddress());
+//
+//
+//        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
+//
+//
+//        // Execute command
+//        System.out.println("Target PBS file path: " + workingDirectory);
+//        // constructing the job object
+//        JobDescriptor jobDescriptor = new JobDescriptor();
+//        jobDescriptor.setWorkingDirectory(workingDirectory);
+//        jobDescriptor.setShellName("/bin/bash");
+//        jobDescriptor.setJobName("GSI_SSH_SLEEP_JOB");
+//        jobDescriptor.setExecutablePath("/bin/echo");
+//        jobDescriptor.setAllEnvExport(true);
+//        jobDescriptor.setMailOptions("n");
+//        jobDescriptor.setStandardOutFile(workingDirectory + File.separator + "application.out");
+//        jobDescriptor.setStandardErrorFile(workingDirectory + File.separator + "application.err");
+//        jobDescriptor.setNodes(1);
+//        jobDescriptor.setProcessesPerNode(1);
+//        jobDescriptor.setQueueName("normal");
+//        jobDescriptor.setMaxWallTime("60");
+//        jobDescriptor.setAcountString("sds128");
+//        List<String> inputs = new ArrayList<String>();
+//        jobDescriptor.setOwner("ogce");
+//        inputs.add("Hello World");
+//        jobDescriptor.setInputValues(inputs);
+//        //finished construction of job object
+//        System.out.println(jobDescriptor.toXML());
+//        for (int i = 0; i < 1; i++) {
+//            String jobID = pbsCluster.submitBatchJob(jobDescriptor);
+//            System.out.println("Job submitted successfully, Job ID: " +  jobID);
+//            MonitorID monitorID = new HPCMonitorID(hostDescription, jobID,null,null,null, "ogce","");
+//            ((HPCMonitorID)monitorID).setAuthenticationInfo(authenticationInfo);
+//            try {
+//                org.apache.airavata.gfac.monitor.util.CommonUtils.addMonitortoQueue(pullQueue, monitorID, jobExecutionContext);
+//            } catch (Exception e) {
+//                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//            }
+//        }
+//        try {
+//
+//            monitorThread.join();
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//    }
+//
+//    @Subscribe
+//    public void testCaseShutDown(JobStatusChangeEvent status) {
+//        Assert.assertNotNull(status.getState());
+//        monitorThread.stop();
+//    }
+//}


[25/50] [abbrv] airavata git commit: Merging changes

Posted by ch...@apache.org.
Merging changes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/755273e1
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/755273e1
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/755273e1

Branch: refs/heads/master
Commit: 755273e1ad2a91e4aa64b41d1a52c25fff78fc5f
Parents: 7b8d984
Author: chathuriw <ka...@gmail.com>
Authored: Wed Nov 5 13:29:04 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 13:29:04 2014 -0500

----------------------------------------------------------------------
 .../airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java     | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/755273e1/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 643263f..331663f 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@ -41,9 +41,6 @@ import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtoco
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.zookeeper.ZooKeeper;
 
 import java.sql.Timestamp;
 import java.util.*;


[26/50] [abbrv] airavata git commit: fixing build issues in branch

Posted by ch...@apache.org.
fixing build issues in branch


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/ce1354e9
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/ce1354e9
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/ce1354e9

Branch: refs/heads/master
Commit: ce1354e96d1f3d2eedc851ca87e00207ca9f56aa
Parents: 755273e
Author: chathuriw <ka...@gmail.com>
Authored: Wed Nov 5 14:39:05 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 14:39:05 2014 -0500

----------------------------------------------------------------------
 .../airavata/client/tools/RegisterSampleApplicationsUtils.java | 4 ++--
 .../org/apache/airavata/client/tools/DocumentCreatorUtils.java | 4 ++--
 .../apache/airavata/gfac/core/context/JobExecutionContext.java | 2 +-
 modules/gfac/pom.xml                                           | 6 +++---
 .../airavata/integration/tools/DocumentCreatorUtils.java       | 4 ++--
 5 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/ce1354e9/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
index 57bcc27..24c0688 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
@@ -48,8 +48,8 @@ public class RegisterSampleApplicationsUtils {
         computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
         computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
         computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
-        computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
-        computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
+        computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(preferredDataMovementProtocol));
+        computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(preferredJobSubmissionProtocol));
         computeResourcePreference.setScratchLocation(scratchLocation);
         return computeResourcePreference;
     }
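
ComputeResourcePreference now stores the preferred protocols as app catalog enums, so the incoming string arguments are converted with valueOf. For illustration (the constant names below are assumptions; JobSubmissionProtocol.SSH is used earlier in this digest, and valueOf is case sensitive and throws IllegalArgumentException for unknown names):

    // Illustrative only: "SSH" and "SCP" are assumed enum constant names.
    computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf("SSH"));
    computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf("SCP"));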

http://git-wip-us.apache.org/repos/asf/airavata/blob/ce1354e9/modules/airavata-client/src/main/java/org/apache/airavata/client/tools/DocumentCreatorUtils.java
----------------------------------------------------------------------
diff --git a/modules/airavata-client/src/main/java/org/apache/airavata/client/tools/DocumentCreatorUtils.java b/modules/airavata-client/src/main/java/org/apache/airavata/client/tools/DocumentCreatorUtils.java
index e6fcb7d..d27d8cf 100644
--- a/modules/airavata-client/src/main/java/org/apache/airavata/client/tools/DocumentCreatorUtils.java
+++ b/modules/airavata-client/src/main/java/org/apache/airavata/client/tools/DocumentCreatorUtils.java
@@ -56,8 +56,8 @@ public class DocumentCreatorUtils {
 		computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
 		computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
 		computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
-		computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
-		computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
+		computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(preferredDataMovementProtocol));
+		computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(preferredJobSubmissionProtocol));
 		computeResourcePreference.setScratchLocation(scratchLocation);
 		return computeResourcePreference;
 	}

http://git-wip-us.apache.org/repos/asf/airavata/blob/ce1354e9/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 30142f8..a9d1bb4 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -279,7 +279,7 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     }
 
 	public SecurityContext getSecurityContext(String name) throws GFacException{
-		SecurityContext secContext = securityContext.get(name+"-"+this.getApplicationContext().getHostDescription().getType().getHostAddress());
+		SecurityContext secContext = securityContext.get(name+"-"+this.getHostName());
 		return secContext;
 	}
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/ce1354e9/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --git a/modules/gfac/pom.xml b/modules/gfac/pom.xml
index ed3e27d..dd38b6f 100644
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@ -35,10 +35,10 @@
                 <module>gfac-ec2</module>
                 <module>gfac-ssh</module>
                 <module>gfac-local</module>
-                <module>gfac-hadoop</module>
-                <module>gfac-gram</module>
+                <!--<module>gfac-hadoop</module>-->
+                <!--<module>gfac-gram</module>-->
                 <module>gfac-gsissh</module>
-                <module>gfac-bes</module>
+                <!--<module>gfac-bes</module>-->
                 <module>gfac-monitor</module>
                 <module>airavata-gfac-service</module>
             </modules>

http://git-wip-us.apache.org/repos/asf/airavata/blob/ce1354e9/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
----------------------------------------------------------------------
diff --git a/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java b/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
index 3cfff87..2fc3ac2 100644
--- a/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
+++ b/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
@@ -52,8 +52,8 @@ public class DocumentCreatorUtils {
 		computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
 		computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
 		computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
-		computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
-		computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
+		computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(preferredDataMovementProtocol));
+		computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(preferredJobSubmissionProtocol));
 		computeResourcePreference.setScratchLocation(scratchLocation);
 		return computeResourcePreference;
 	}


[45/50] [abbrv] airavata git commit: adding missing job parameters at createJobDescriptor

Posted by ch...@apache.org.
adding missing job parameters at createJobDescriptor


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/62f00363
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/62f00363
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/62f00363

Branch: refs/heads/master
Commit: 62f003631e1493dfaacff2357c96e90088d5187d
Parents: e541329
Author: chathuriw <ka...@gmail.com>
Authored: Fri Nov 7 13:55:55 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Fri Nov 7 13:55:55 2014 -0500

----------------------------------------------------------------------
 .../client/samples/CreateLaunchExperiment.java  | 18 ++++----
 .../gfac/gsissh/util/GFACGSISSHUtils.java       | 45 ++++++++++++++++++++
 2 files changed, 54 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/62f00363/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index d6c0f80..d437c4a 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -56,9 +56,9 @@ public class CreateLaunchExperiment {
     private static final String DEFAULT_GATEWAY = "default.registry.gateway";
     private static Airavata.Client airavataClient;
 
-    private static String echoAppId = "Echo_37c26231-8784-4a40-b184-ae00f6330113";
-    private static String wrfAppId = "WRF_5f097c9c-7066-49ec-aed7-4e39607b3adc";
-    private static String amberAppId = "Amber_89906be6-5678-49a6-9d04-a0604fbdef2e";
+    private static String echoAppId = "Echo_647ba0c5-64ef-4efe-9786-2d28f4d5acc9";
+    private static String wrfAppId = "WRF_a458df70-6808-4d5d-ae32-c49082f2a6cc";
+    private static String amberAppId = "Amber_1b99f73b-a88d-44e3-b04e-4f56ba95ed6f";
 
     private static String localHost = "localhost";
     private static String trestlesHostName = "trestles.sdsc.xsede.org";
@@ -178,7 +178,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(trestlesHostName)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, "sds128");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, "sds128");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
@@ -234,7 +234,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(unicoreHostName)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, "sds128");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, "sds128");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
@@ -384,7 +384,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(trestlesHostName)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, "sds128");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, "sds128");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
@@ -460,7 +460,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(localHost)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, "");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, "");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
@@ -560,7 +560,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(stampedeHostName)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, "TG-STA110014S");
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, "TG-STA110014S");
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
@@ -684,7 +684,7 @@ public class CreateLaunchExperiment {
                 for (String id : computeResources.keySet()) {
                     String resourceName = computeResources.get(id);
                     if (resourceName.equals(br2HostName)) {
-                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 1, 0, 1, null);
+                        ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(id, 1, 1, 1, "normal", 30, 0, 1, null);
                         UserConfigurationData userConfigurationData = new UserConfigurationData();
                         userConfigurationData.setAiravataAutoSchedule(false);
                         userConfigurationData.setOverrideManualScheduledParams(false);
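
The scheduling calls above change the wall time argument from 1 to 30, presumably because createJobDescriptor now forwards the limit to the batch script. A hedged reading of the positional arguments (the parameter names are assumptions, not taken from this diff):

    // Assumed parameter order for ExperimentModelUtil.createComputationResourceScheduling:
    // (resourceHostId, totalCPUCount, nodeCount, numberOfThreads, queueName,
    //  wallTimeLimit, jobStartTime, totalPhysicalMemory, computationalProjectAccount)
    ComputationalResourceScheduling scheduling = ExperimentModelUtil.createComputationResourceScheduling(
            id, 1, 1, 1, "normal", 30, 0, 1, "sds128");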

http://git-wip-us.apache.org/repos/asf/airavata/blob/62f00363/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index 2adc013..3c2e839 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -42,6 +42,7 @@ import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
+import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
@@ -50,6 +51,8 @@ import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterfa
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
+import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
 import org.apache.airavata.schemas.gfac.FileArrayType;
 import org.apache.airavata.schemas.gfac.StringArrayType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
@@ -168,6 +171,7 @@ public class GFACGSISSHUtils {
         JobDescriptor jobDescriptor = new JobDescriptor();
         ApplicationContext applicationContext = jobExecutionContext.getApplicationContext();
         ApplicationDeploymentDescription app = applicationContext.getApplicationDeploymentDescription();
+        TaskDetails taskData = jobExecutionContext.getTaskData();
         // this is common for any application descriptor
         jobDescriptor.setCallBackIp(ServerSettings.getIp());
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
@@ -191,6 +195,47 @@ public class GFACGSISSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
+        jobDescriptor.setUserName(((GSISSHAbstractCluster) cluster).getServerInfo().getUserName());
+        jobDescriptor.setShellName("/bin/bash");
+        jobDescriptor.setAllEnvExport(true);
+        jobDescriptor.setMailOptions("n");
+        jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
+
+        ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
+        if (taskScheduling != null) {
+            int totalNodeCount = taskScheduling.getNodeCount();
+            int totalCPUCount = taskScheduling.getTotalCPUCount();
+
+//        jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
+            if (taskScheduling.getComputationalProjectAccount() != null) {
+                jobDescriptor.setAcountString(taskScheduling.getComputationalProjectAccount());
+            }
+            if (taskScheduling.getQueueName() != null) {
+                jobDescriptor.setQueueName(taskScheduling.getQueueName());
+            }
+
+            if (totalNodeCount > 0) {
+                jobDescriptor.setNodes(totalNodeCount);
+            }
+            if (totalCPUCount > 0) {
+                int ppn = totalCPUCount / totalNodeCount;
+                jobDescriptor.setProcessesPerNode(ppn);
+                jobDescriptor.setCPUCount(totalCPUCount);
+            }
+            if (taskScheduling.getWallTimeLimit() > 0) {
+                jobDescriptor.setMaxWallTime(String.valueOf(taskScheduling.getWallTimeLimit()));
+            }
+        } else {
+            logger.error("Task scheduling cannot be null at this point..");
+        }
+
+
         return jobDescriptor;
     }
 }
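
The mapping from ComputationalResourceScheduling to JobDescriptor added in the hunk above can be read as one small helper. The sketch below is illustrative rather than the committed code: the class name is hypothetical, it uses only the setters and getters that appear in the diff (including the API's setAcountString spelling), and it guards the processes-per-node division so a zero node count cannot throw.

    import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
    import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;

    // Hypothetical helper class; not part of the commit above.
    class SchedulingMapper {
        /** Copies task scheduling values onto the job descriptor, skipping unset fields. */
        static void apply(ComputationalResourceScheduling scheduling, JobDescriptor jobDescriptor) {
            if (scheduling == null) {
                throw new IllegalArgumentException("Task scheduling cannot be null at this point");
            }
            if (scheduling.getComputationalProjectAccount() != null) {
                jobDescriptor.setAcountString(scheduling.getComputationalProjectAccount());
            }
            if (scheduling.getQueueName() != null) {
                jobDescriptor.setQueueName(scheduling.getQueueName());
            }
            int nodes = scheduling.getNodeCount();
            int cpus = scheduling.getTotalCPUCount();
            if (nodes > 0) {
                jobDescriptor.setNodes(nodes);
            }
            if (cpus > 0 && nodes > 0) {
                // computing processes per node only when both counts are positive avoids a divide-by-zero
                jobDescriptor.setProcessesPerNode(cpus / nodes);
                jobDescriptor.setCPUCount(cpus);
            }
            if (scheduling.getWallTimeLimit() > 0) {
                jobDescriptor.setMaxWallTime(String.valueOf(scheduling.getWallTimeLimit()));
            }
        }
    }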


[17/50] [abbrv] airavata git commit: Integrated appCatalog thrift model with GSISSH input and output handlers and improved the job execution context

Posted by ch...@apache.org.
Integrated appCatalog thrift model with GSISSH input and output handlers and improved the job execution context


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/5a28f745
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/5a28f745
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/5a28f745

Branch: refs/heads/master
Commit: 5a28f745193e0cbab43dac26e018cc31ded1d26d
Parents: 3f953e0
Author: shamrath <sh...@gmail.com>
Authored: Fri Oct 31 17:41:22 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:23:05 2014 -0500

----------------------------------------------------------------------
 .../model/workspace/experiment/JobDetails.java  | 11 ++-
 .../gfac/core/context/JobExecutionContext.java  | 27 +++++-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 91 ++++++++++++++++----
 .../handler/GSISSHDirectorySetupHandler.java    |  7 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java | 18 ++--
 .../gsissh/handler/GSISSHOutputHandler.java     | 53 +++---------
 .../airavata/gsi/ssh/api/job/JobDescriptor.java |  7 ++
 7 files changed, 143 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
index d1cbe5e..c1034a0 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
@@ -271,9 +271,14 @@ import org.slf4j.LoggerFactory;
     }
   }
 
-  public String getJobDescription() {
-    return this.jobDescription;
-  }
+    /**
+     * This method is deprecated now that the new app catalog thrift model has been introduced.
+     * @return
+     */
+    @Deprecated
+    public String getJobDescription() {
+        return this.jobDescription;
+    }
 
   public void setJobDescription(String jobDescription) {
     this.jobDescription = jobDescription;

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index cade06b..dcae96a 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -33,6 +33,7 @@ import org.apache.airavata.gfac.SecurityContext;
 import org.apache.airavata.gfac.core.cpi.GFac;
 import org.apache.airavata.gfac.core.notification.GFacNotifier;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
 import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
@@ -100,12 +101,20 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     private DataMovementProtocol preferredDataMovementProtocol;
     /**
      * List of job submission protocols sorted by priority order.
-      */
+     */
     private List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces;
     /**
      * use preferred job submission protocol.
      */
     private JobSubmissionInterface preferredJobSubmissionInterface;
+    /**
+     * List of data movement interfaces sorted by priority order.
+     */
+    private List<DataMovementInterface> hostPrioritizedDataMovementInterfaces;
+    /**
+     * use preferred data movement protocol.
+     */
+    private DataMovementInterface preferredDataMovementInterface;
 
 //    private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -434,4 +443,20 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public String getHostName() {
         return applicationContext.getComputeResourceDescription().getHostName();
     }
+
+    public List<DataMovementInterface> getHostPrioritizedDataMovementInterfaces() {
+        return hostPrioritizedDataMovementInterfaces;
+    }
+
+    public void setHostPrioritizedDataMovementInterfaces(List<DataMovementInterface> hostPrioritizedDataMovementInterfaces) {
+        this.hostPrioritizedDataMovementInterfaces = hostPrioritizedDataMovementInterfaces;
+    }
+
+    public DataMovementInterface getPreferredDataMovementInterface() {
+        return preferredDataMovementInterface;
+    }
+
+    public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
+        this.preferredDataMovementInterface = preferredDataMovementInterface;
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index e8e4c66..656a291 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -52,8 +52,9 @@ import org.apache.airavata.messaging.core.PublisherFactory;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
+import org.apache.airavata.model.appcatalog.computeresource.FileSystems;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -74,6 +75,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
 /**
@@ -303,6 +305,7 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setZk(zk);
         jobExecutionContext.setCredentialStoreToken(AiravataZKUtils.getExpTokenId(zk, experimentID, taskID));
 
+        // handle job submission protocol
         List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
         if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
             Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
@@ -316,36 +319,92 @@ public class BetterGfacImpl implements GFac,Watcher {
         }else {
             throw new GFacException("Compute resource should have at least one job submission interface defined...");
         }
+        // handle data movement protocol
+        List<DataMovementInterface> dataMovementInterfaces = computeResource.getDataMovementInterfaces();
+        if (dataMovementInterfaces != null && !dataMovementInterfaces.isEmpty()) {
+            Collections.sort(dataMovementInterfaces, new Comparator<DataMovementInterface>() {
+                @Override
+                public int compare(DataMovementInterface dataMovementInterface, DataMovementInterface dataMovementInterface2) {
+                    return dataMovementInterface.getPriorityOrder() - dataMovementInterface2.getPriorityOrder();
+                }
+            });
+            jobExecutionContext.setHostPrioritizedDataMovementInterfaces(dataMovementInterfaces);
+        }
+
+        // set the compute resource configuration as the default preferred values, then override them with gateway user preferences.
+        populateDefaultComputeResourceConfiguration(jobExecutionContext, applicationInterface, computeResource);
+        // if gateway resource preference is set
         if (gatewayResourcePreferences != null ) {
             if (gatewayResourcePreferences.getScratchLocation() == null) {
                 gatewayResourcePreferences.setScratchLocation("/tmp");
             }
+            setUpWorkingLocation(jobExecutionContext, applicationInterface, gatewayResourcePreferences.getScratchLocation());
 
-            /**
-             * Working dir
-             */
-            String workingDir = gatewayResourcePreferences.getScratchLocation() + File.separator + jobExecutionContext.getExperimentID();
-            jobExecutionContext.setWorkingDir(workingDir);
+            jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
+            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
+                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+            } else {
+                for (JobSubmissionInterface jobSubmissionInterface : jobSubmissionInterfaces) {
+                    if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == jobSubmissionInterface.getJobSubmissionProtocol()) {
+                        jobExecutionContext.setPreferredJobSubmissionInterface(jobSubmissionInterface);
+                        break;
+                    }
+                }
+            }
+
+            // set gatewayUserPreferred data movement protocol and interface
+            jobExecutionContext.setPreferredDataMovementProtocol(gatewayResourcePreferences.getPreferredDataMovementProtocol());
+            if (gatewayResourcePreferences.getPreferredDataMovementProtocol() == null) {
+                jobExecutionContext.setPreferredDataMovementInterface(jobExecutionContext.getHostPrioritizedDataMovementInterfaces().get(0));
+                jobExecutionContext.setPreferredDataMovementProtocol(jobExecutionContext.getPreferredDataMovementInterface().getDataMovementProtocol());
+            } else {
+                for (DataMovementInterface dataMovementInterface : dataMovementInterfaces) {
+                    if (gatewayResourcePreferences.getPreferredDataMovementProtocol() == dataMovementInterface.getDataMovementProtocol()) {
+                        jobExecutionContext.setPreferredDataMovementInterface(dataMovementInterface);
+                        break;
+                    }
+                }
+            }
+        }
+        return jobExecutionContext;
+    }
+
+    private void setUpWorkingLocation(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, String scratchLocation) {
+
+        /**
+         * Working dir
+         */
+        String workingDir = scratchLocation + File.separator + jobExecutionContext.getExperimentID();
+        jobExecutionContext.setWorkingDir(workingDir);
 
             /*
             * Input and Output Directory
             */
-            jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
-            jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
+        jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
+        jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
 
             /*
             * Stdout and Stderr for Shell
             */
-            jobExecutionContext.setStandardOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
-            jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+        jobExecutionContext.setStandardOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
+        jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+    }
 
-            jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
-            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
-                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
-                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
-            }
+    private void populateDefaultComputeResourceConfiguration(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, ComputeResourceDescription computeResource) {
+        Map<FileSystems, String> fileSystems = computeResource.getFileSystems();
+        String scratchLocation = fileSystems.get(FileSystems.SCRATCH);
+        if (scratchLocation != null) {
+            setUpWorkingLocation(jobExecutionContext, applicationInterface, scratchLocation);
+        }
+
+        jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+        jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+
+        if (jobExecutionContext.getHostPrioritizedDataMovementInterfaces() != null) {
+            jobExecutionContext.setPreferredDataMovementInterface(jobExecutionContext.getHostPrioritizedDataMovementInterfaces().get(0));
+            jobExecutionContext.setPreferredDataMovementProtocol(jobExecutionContext.getPreferredDataMovementInterface().getDataMovementProtocol());
         }
-        return jobExecutionContext;
     }
 
     private boolean submitJob(JobExecutionContext jobExecutionContext) throws GFacException {
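
The selection logic introduced above boils down to: sort the host's interfaces by ascending priorityOrder, honour a gateway preference when it matches one of them, and otherwise fall back to the first. A condensed sketch follows; the class name is hypothetical and this is not the committed method, it relies only on the getPriorityOrder and getJobSubmissionProtocol accessors used in the diff, and the same shape applies to DataMovementInterface.

    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

    // Hypothetical helper class; not part of the commit above.
    class PreferredInterfaceSelector {
        /** Returns the gateway-preferred interface if one matches, otherwise the host's top-priority one. */
        static JobSubmissionInterface select(List<JobSubmissionInterface> interfaces,
                                             JobSubmissionProtocol preferredProtocol) {
            // lower priorityOrder value means higher priority, as in the comparator above
            Collections.sort(interfaces, new Comparator<JobSubmissionInterface>() {
                public int compare(JobSubmissionInterface a, JobSubmissionInterface b) {
                    return a.getPriorityOrder() - b.getPriorityOrder();
                }
            });
            if (preferredProtocol != null) {
                for (JobSubmissionInterface candidate : interfaces) {
                    if (candidate.getJobSubmissionProtocol() == preferredProtocol) {
                        return candidate;
                    }
                }
            }
            return interfaces.get(0); // no matching preference: fall back to the highest-priority interface
        }
    }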

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
index b87f99a..b2790c9 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
@@ -77,12 +77,11 @@ public class GSISSHDirectorySetupHandler extends AbstractRecoverableHandler {
         } else {
             log.info("Successfully retrieved the Security Context");
         }
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
 
-            String workingDirectory = app.getScratchWorkingDirectory();
+            String workingDirectory = jobExecutionContext.getWorkingDir();
             cluster.makeDirectory(workingDirectory);
-            cluster.makeDirectory(app.getInputDataDirectory());
-            cluster.makeDirectory(app.getOutputDataDirectory());
+            cluster.makeDirectory(jobExecutionContext.getInputDir());
+            cluster.makeDirectory(jobExecutionContext.getOutputDir());
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();
             status.setTransferState(TransferState.DIRECTORY_SETUP);
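
The handler now takes its three paths from the JobExecutionContext instead of the old application deployment descriptor. For reference, the layout those paths follow, as set up in BetterGfacImpl above, is roughly the following; the class and the literal directory names are illustrative stand-ins (the real names come from Constants.INPUT_DATA_DIR_VAR_NAME and Constants.OUTPUT_DATA_DIR_VAR_NAME).

    import java.io.File;

    // Hypothetical value holder; not part of the commit above.
    class WorkingDirLayout {
        final String workingDir;
        final String inputDir;
        final String outputDir;

        WorkingDirLayout(String scratchLocation, String experimentId) {
            // <scratch>/<experimentId>, with input and output directories beneath it
            this.workingDir = scratchLocation + File.separator + experimentId;
            this.inputDir = workingDir + File.separator + "inputData";   // stand-in for INPUT_DATA_DIR_VAR_NAME
            this.outputDir = workingDir + File.separator + "outputData"; // stand-in for OUTPUT_DATA_DIR_VAR_NAME
        }
    }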

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index 5665b5b..b882be6 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@ -27,17 +27,18 @@ import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.AbstractRecoverableHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.model.workspace.experiment.*;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
 import org.apache.airavata.schemas.gfac.URIParameterType;
 import org.slf4j.Logger;
@@ -45,7 +46,11 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
 
 /**
  * Recoverability for this handler assumes the same input values will come in the second
@@ -171,11 +176,10 @@ public class GSISSHInputHandler extends AbstractRecoverableHandler {
     }
 
     private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
-            String targetFile = app.getInputDataDirectory() + File.separator + substring;
+            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
             if (paramValue.startsWith("file")) {
                 paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
             }
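
stageInputFiles now resolves the remote target against jobExecutionContext.getInputDir(); the rest is plain string handling. A self-contained sketch of that path logic, with hypothetical class and method names rather than the committed code:

    import java.io.File;

    // Hypothetical helper; not part of the commit above.
    class InputStagingPaths {
        /** Returns {localSource, remoteTarget} for a staged input value. */
        static String[] sourceAndTarget(String inputDir, String paramValue) {
            // the remote side keeps only the file name portion of the value
            String fileName = paramValue.substring(paramValue.lastIndexOf(File.separator) + 1);
            String localSource = paramValue;
            // values such as "file:/tmp/in.txt" have the scheme prefix stripped before the copy
            if (localSource.startsWith("file")) {
                localSource = localSource.substring(localSource.indexOf(":") + 1);
            }
            String remoteTarget = inputDir + File.separator + fileName;
            return new String[]{localSource, remoteTarget};
        }
    }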

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
index ac9bf3c..a714099 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
@@ -27,6 +27,7 @@ import java.util.*;
 import net.schmizz.sshj.connection.ConnectionException;
 import net.schmizz.sshj.transport.TransportException;
 
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.Constants;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
@@ -46,6 +47,10 @@ import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
 import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -67,36 +72,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
         int oldIndex = 0;
         List<String> oldFiles = new ArrayList<String>();
         StringBuffer data = new StringBuffer("|");
-        if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-            // so attempting to get it from the registry
-            if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) {
-                log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                TaskDetails taskData = null;
-                try {
-                    taskData = (TaskDetails) jobExecutionContext.getRegistry().get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                } catch (RegistryException e) {
-                    log.error("Error retrieving job details from Registry");
-                    throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                }
-                JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                String jobDescription = jobDetails.getJobDescription();
-                if (jobDescription != null) {
-                    JobDescriptor jobDescriptor = null;
-                    try {
-                        jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                    } catch (XmlException e1) {
-                        e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                    }
-                    applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                            jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                    applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                    applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                    applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                    applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                }
-            }
-        }
         try {
             if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
 
@@ -114,8 +89,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
 
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
         Cluster cluster = null;
         
         try {
@@ -174,7 +147,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
                 while(stdOutStr.isEmpty()){
                 try {
-                	cluster.scpFrom(app.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                	cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
                 	stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
 				} catch (Exception e) {
 					log.error(e.getLocalizedMessage());
@@ -192,7 +165,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 data.append(oldFiles.get(index++)).append(",");
             } else {
                 localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
-                cluster.scpFrom(app.getStandardError(), localStdErrFile.getAbsolutePath());
+                cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
                 StringBuffer temp = new StringBuffer(data.append(localStdErrFile.getAbsolutePath()).append(",").toString());
                 GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
             }
@@ -219,7 +192,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                     List<String> outputList = null;
                     int retry=3;
                     while(retry>0){
-                    	 outputList = cluster.listDirectory(app.getOutputDataDirectory());
+                    	 outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
                         if (outputList.size() == 1 && outputList.get(0).isEmpty()) {
                             Thread.sleep(10000);
                         } else if (outputList.size() > 0) {
@@ -229,7 +202,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                         }
                         retry--;
                         if(retry==0){
-//                            log.info("Ohhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shittttttt");
                         }
                     	 Thread.sleep(10000);
                     }
@@ -269,7 +241,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                             outputFile = oldFiles.get(index);
                             data.append(oldFiles.get(index++)).append(",");
                         } else {
-                            cluster.scpFrom(app.getOutputDataDirectory() + File.separator + valueList, outputDataDir);
+                            cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                             outputFile = outputDataDir + File.separator + valueList;
                             jobExecutionContext.addOutputFile(outputFile);
                             StringBuffer temp = new StringBuffer(data.append(outputFile).append(",").toString());
@@ -296,9 +268,10 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 );
                 }
             }
-            app.setStandardError(localStdErrFile.getAbsolutePath());
-            app.setStandardOutput(localStdOutFile.getAbsolutePath());
-            app.setOutputDataDirectory(outputDataDir);
+            // Why do we set the following?
+//            app.setStandardError(localStdErrFile.getAbsolutePath());
+//            app.setStandardOutput(localStdOutFile.getAbsolutePath());
+//            app.setOutputDataDirectory(outputDataDir);
             status.setTransferState(TransferState.DOWNLOAD);
             detail.setTransferStatus(status);
             detail.setTransferDescription(outputDataDir);
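
The output handler polls the remote side with a small fixed retry budget, both for the stdout file and for the output directory listing used above. Distilled into one method, this looks like the sketch below; the class and method names are hypothetical and it assumes only the Cluster.listDirectory(String) call visible in the diff.

    import java.util.Collections;
    import java.util.List;

    import org.apache.airavata.gsi.ssh.api.Cluster;

    // Hypothetical helper; not part of the commit above.
    class OutputPoller {
        /** Lists the remote output directory, waiting up to retries * 10 seconds for files to appear. */
        static List<String> waitForOutputs(Cluster cluster, String outputDir, int retries) throws Exception {
            List<String> listing = Collections.emptyList();
            while (retries-- > 0) {
                listing = cluster.listDirectory(outputDir);
                boolean empty = listing.isEmpty() || (listing.size() == 1 && listing.get(0).isEmpty());
                if (!empty) {
                    break; // something has been written to the output directory
                }
                Thread.sleep(10000); // wait before polling again, as the handler above does
            }
            return listing;
        }
    }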

http://git-wip-us.apache.org/repos/asf/airavata/blob/5a28f745/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
----------------------------------------------------------------------
diff --git a/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java b/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
index 9a0639b..9b7102b 100644
--- a/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
+++ b/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
@@ -60,6 +60,13 @@ public class JobDescriptor {
         return this.jobDescriptionDocument;
     }
 
+    /**
+     * With the new app catalog thrift object integration, this is no longer used.
+     * @param xml
+     * @return
+     * @throws XmlException
+     */
+    @Deprecated
     public static JobDescriptor fromXML(String xml)
             throws XmlException {
         JobDescriptorDocument parse = JobDescriptorDocument.Factory


[43/50] [abbrv] airavata git commit: XBaya - Replaced DataObjectType with Input and OutputDataObjectType

Posted by ch...@apache.org.
XBaya - Replaced DataObjectType with Input and OutputDataObjectType


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/f09c742b
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/f09c742b
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/f09c742b

Branch: refs/heads/master
Commit: f09c742b8f4bb6cb91c61b852e82e414250a5bd7
Parents: 4b27ce8
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 19:40:55 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 19:40:55 2014 -0500

----------------------------------------------------------------------
 .../ui/experiment/LaunchApplicationWindow.java  | 58 +++++++++-----------
 .../WorkflowInterpreterLaunchWindow.java        | 17 ++++--
 2 files changed, 36 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/f09c742b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/LaunchApplicationWindow.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/LaunchApplicationWindow.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/LaunchApplicationWindow.java
index 9fb6884..3740918 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/LaunchApplicationWindow.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/LaunchApplicationWindow.java
@@ -21,48 +21,25 @@
 
 package org.apache.airavata.xbaya.ui.experiment;
 
-import java.awt.event.ActionEvent;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import javax.swing.AbstractAction;
-import javax.swing.BorderFactory;
-import javax.swing.JButton;
-import javax.swing.JComboBox;
-import javax.swing.JLabel;
-import javax.swing.JOptionPane;
-import javax.swing.JPanel;
-import javax.swing.border.EtchedBorder;
-import javax.xml.namespace.QName;
-
 import org.apache.airavata.api.Airavata.Client;
-import org.apache.airavata.common.utils.XMLUtil;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.error.AiravataClientConnectException;
 import org.apache.airavata.model.error.AiravataClientException;
 import org.apache.airavata.model.error.AiravataSystemException;
 import org.apache.airavata.model.error.InvalidRequestException;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
 import org.apache.airavata.workflow.model.graph.DataPort;
 import org.apache.airavata.workflow.model.graph.impl.NodeImpl;
-import org.apache.airavata.workflow.model.graph.system.InputNode;
-import org.apache.airavata.workflow.model.graph.system.OutputNode;
-import org.apache.airavata.workflow.model.graph.util.GraphUtil;
 import org.apache.airavata.workflow.model.wf.Workflow;
-import org.apache.airavata.ws.monitor.MonitorConfiguration;
 import org.apache.airavata.xbaya.ThriftClientData;
 import org.apache.airavata.xbaya.ThriftServiceType;
 import org.apache.airavata.xbaya.XBayaEngine;
 import org.apache.airavata.xbaya.ui.dialogs.XBayaDialog;
-import org.apache.airavata.xbaya.ui.utils.ErrorMessages;
 import org.apache.airavata.xbaya.ui.widgets.GridPanel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaLabel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaTextField;
@@ -70,7 +47,22 @@ import org.apache.airavata.xbaya.util.XBayaUtil;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.xmlpull.infoset.XmlElement;
+
+import javax.swing.AbstractAction;
+import javax.swing.BorderFactory;
+import javax.swing.JButton;
+import javax.swing.JComboBox;
+import javax.swing.JLabel;
+import javax.swing.JOptionPane;
+import javax.swing.JPanel;
+import javax.swing.border.EtchedBorder;
+import javax.xml.namespace.QName;
+import java.awt.event.ActionEvent;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 public class LaunchApplicationWindow {
 
@@ -364,8 +356,8 @@ public class LaunchApplicationWindow {
             DataPort inputPort = inputPorts.get(i);
             XBayaTextField parameterTextField = this.parameterTextFields.get(i);           
             String value = parameterTextField.getText();
-            DataObjectType elem = new DataObjectType();
-            elem.setKey(inputPort.getName());
+            InputDataObjectType elem = new InputDataObjectType();
+            elem.setName(inputPort.getName());
             String type = inputPort.getType().getLocalPart().trim();
             DataType inpType = DataType.STRING;
             if(type.equalsIgnoreCase("string")){
@@ -391,8 +383,8 @@ public class LaunchApplicationWindow {
         
         for (int i = 0; i < outputPorts.size(); i++) {
             DataPort outputPort = outputPorts.get(i);
-            DataObjectType elem = new DataObjectType();
-            elem.setKey(outputPort.getName());
+            OutputDataObjectType elem = new OutputDataObjectType();
+            elem.setName(outputPort.getName());
             String type = outputPort.getType().getLocalPart().trim();
             DataType outType = DataType.STRING;
             if(type.equalsIgnoreCase("string")){
@@ -443,10 +435,10 @@ public class LaunchApplicationWindow {
         	while(output.equals("")){
         		output = "";
         		fullOutput = "Experiment Completed Successfully. Output(s) are shown below:\n";
-            	List<DataObjectType> outputs = airavataClient.getExperimentOutputs(experiment.getExperimentID());
+            	List<OutputDataObjectType> outputs = airavataClient.getExperimentOutputs(experiment.getExperimentID());
             	for(int i1=0; i1<outputs.size(); i1++){
             		output = outputs.get(i1).getValue();
-            		fullOutput+= outputs.get(i1).getKey()+": "+output+"\n";
+            		fullOutput+= outputs.get(i1).getName()+": "+output+"\n";
             	}            	
             } 
         	JOptionPane.showMessageDialog(null, fullOutput);
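
With DataObjectType gone, experiment inputs are now built from the appinterface types and keyed with setName rather than setKey. A minimal sketch of that construction follows; the helper is hypothetical, only the DataType.STRING branch from the diff is shown, and other QName local parts map to their enum counterparts the same way.

    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;

    // Hypothetical helper; not part of the commit above.
    class ExperimentInputBuilder {
        /** Builds one experiment input from a port name and the value typed into the UI. */
        static InputDataObjectType build(String portName, String value) {
            InputDataObjectType elem = new InputDataObjectType();
            elem.setName(portName);        // replaces the old DataObjectType.setKey(...)
            elem.setType(DataType.STRING); // the window maps the port's QName local part to a DataType
            elem.setValue(value);
            return elem;
        }
    }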

http://git-wip-us.apache.org/repos/asf/airavata/blob/f09c742b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java
----------------------------------------------------------------------
diff --git a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java
index 76a745a..c0533b4 100644
--- a/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java
+++ b/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java
@@ -26,6 +26,8 @@ import org.apache.airavata.api.Airavata.Client;
 import org.apache.airavata.api.client.AiravataClientFactory;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.XMLUtil;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
 import org.apache.airavata.model.error.AiravataClientConnectException;
 import org.apache.airavata.model.error.AiravataClientException;
 import org.apache.airavata.model.error.AiravataSystemException;
@@ -33,8 +35,6 @@ import org.apache.airavata.model.error.InvalidRequestException;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
 import org.apache.airavata.orchestrator.client.OrchestratorClientFactory;
@@ -47,7 +47,6 @@ import org.apache.airavata.xbaya.ThriftServiceType;
 import org.apache.airavata.xbaya.XBayaEngine;
 import org.apache.airavata.xbaya.messaging.MonitorException;
 import org.apache.airavata.xbaya.ui.dialogs.XBayaDialog;
-import org.apache.airavata.xbaya.ui.utils.ErrorMessages;
 import org.apache.airavata.xbaya.ui.widgets.GridPanel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaLabel;
 import org.apache.airavata.xbaya.ui.widgets.XBayaTextField;
@@ -57,7 +56,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xmlpull.infoset.XmlElement;
 
-import javax.swing.*;
+import javax.swing.AbstractAction;
+import javax.swing.BorderFactory;
+import javax.swing.JButton;
+import javax.swing.JComboBox;
+import javax.swing.JLabel;
+import javax.swing.JOptionPane;
+import javax.swing.JPanel;
 import javax.swing.border.EtchedBorder;
 import javax.xml.namespace.QName;
 import java.awt.event.ActionEvent;
@@ -301,8 +306,8 @@ public class WorkflowInterpreterLaunchWindow {
             inputNode.getID();
             String value = parameterTextField.getText();
 //            inputNode.setDefaultValue(value);
-            DataObjectType elem = new DataObjectType();
-            elem.setKey(inputNode.getID());
+            InputDataObjectType elem = new InputDataObjectType();
+            elem.setName(inputNode.getID());
             elem.setType(DataType.STRING);
             elem.setValue(value);
 			experiment.addToExperimentInputs(elem );


[06/50] [abbrv] airavata git commit: Integrated appCatalog for ssh and gsi modules, commented out old test classes, need to fix this

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index 7ee5d6a..7e9fa89 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@ -20,11 +20,13 @@
 */
 package org.apache.airavata.gfac.ssh.util;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.StringUtil;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
 import org.apache.airavata.gfac.Constants;
@@ -32,28 +34,36 @@ import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.RequestData;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
 import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
 import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.*;
+import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.schemas.gfac.FileArrayType;
+import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
 
 public class GFACSSHUtils {
     private final static Logger logger = LoggerFactory.getLogger(GFACSSHUtils.class);
@@ -66,109 +76,113 @@ public class GFACSSHUtils {
 
 
     public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        HostDescription registeredHost = jobExecutionContext.getApplicationContext().getHostDescription();
-        if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType) {
+        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
             logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-        } else if (registeredHost.getType() instanceof SSHHostType
-                || registeredHost.getType() instanceof GsisshHostType) {
-            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-            String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-            RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
-            requestData.setTokenId(credentialStoreToken);
-
-            ServerInfo serverInfo = new ServerInfo(null, registeredHost.getType().getHostAddress());
-            SSHAuthWrapper sshAuth = (SSHAuthWrapper) jobExecutionContext.getProperty(ADVANCED_SSH_AUTH);
-
-            Cluster pbsCluster = null;
+        } else if ( preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH  ) {
             try {
-                TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
-                String installedParentPath = ((HpcApplicationDeploymentType)
-                        jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
-                if (installedParentPath == null) {
-                    installedParentPath = "/";
-                }
+                AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+                SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
+                if (sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
+                    SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+                    String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+                    RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
+                    requestData.setTokenId(credentialStoreToken);
+
+                    ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
+                    SSHAuthWrapper sshAuth = (SSHAuthWrapper) jobExecutionContext.getProperty(ADVANCED_SSH_AUTH);
+
+                    Cluster pbsCluster = null;
+                    try {
+                        TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
+                        String installedParentPath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
+                        if (installedParentPath == null) {
+                            installedParentPath = "/";
+                        }
 
-                SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials in to this object,data will be used
-                serverInfo.setUserName(credentials.getPortalUserName());
-                jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
-                // inside the pbsCluser object
+                        SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials in to this object,data will be used
+                        serverInfo.setUserName(credentials.getPortalUserName());
+                        jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
+                        // inside the pbsCluser object
 
-                String key = credentials.getPortalUserName() + registeredHost.getType().getHostAddress() +
-                        serverInfo.getPort();
-                if(sshAuth!=null){
-                    key=sshAuth.getKey();
-                }
-                boolean recreate = false;
-                synchronized (clusters) {
-                    if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                        recreate = true;
-                    } else if (clusters.containsKey(key)) {
-                        int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                        if (clusters.get(key).get(i).getSession().isConnected()) {
-                            pbsCluster = clusters.get(key).get(i);
-                        } else {
-                            clusters.get(key).remove(i);
-                            recreate = true;
+                        String key = credentials.getPortalUserName() + jobExecutionContext.getHostName() + serverInfo.getPort();
+                        if(sshAuth!=null){
+                            key=sshAuth.getKey();
                         }
-                        if (!recreate) {
-                            try {
-                                pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
-                            } catch (Exception e) {
-                                clusters.get(key).remove(i);
-                                logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                maxClusterCount++;
-                                recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+                        boolean recreate = false;
+                        synchronized (clusters) {
+                            if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+                                recreate = true;
+                            } else if (clusters.containsKey(key)) {
+                                int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+                                if (clusters.get(key).get(i).getSession().isConnected()) {
+                                    pbsCluster = clusters.get(key).get(i);
+                                } else {
+                                    clusters.get(key).remove(i);
+                                    recreate = true;
+                                }
+                                if (!recreate) {
+                                    try {
+                                        pbsCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try to connect; if it works we are good, else we recreate
+                                    } catch (Exception e) {
+                                        clusters.get(key).remove(i);
+                                        logger.info("Connection found in the connection map has expired, so we create it from scratch");
+                                        maxClusterCount++;
+                                        recreate = true; // recreate the pbsCluster if there is any exception during connection
+                                    }
+                                }
+                                logger.info("Re-using the same connection used with the connection string:" + key);
+                            } else {
+                                recreate = true;
+                            }
+                            if (recreate) {
+                                if (sshAuth != null) {
+                                    pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),
+                                            CommonUtils.getPBSJobManager(installedParentPath));
+                                    jobExecutionContext.setProperty(ADVANCED_SSH_AUTH,null); // some other provider might fail
+                                    key = sshAuth.getKey();
+                                } else {
+                                    pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
+                                            CommonUtils.getPBSJobManager(installedParentPath));
+                                }
+                                List<Cluster> pbsClusters = null;
+                                if (!(clusters.containsKey(key))) {
+                                    pbsClusters = new ArrayList<Cluster>();
+                                } else {
+                                    pbsClusters = clusters.get(key);
+                                }
+                                pbsClusters.add(pbsCluster);
+                                clusters.put(key, pbsClusters);
                             }
                         }
-                        logger.info("Re-using the same connection used with the connection string:" + key);
-                    } else {
-                        recreate = true;
-                    }
-                    if (recreate) {
-                        if (sshAuth != null) {
-                            pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),
-                                    CommonUtils.getPBSJobManager(installedParentPath));
-                            jobExecutionContext.setProperty(ADVANCED_SSH_AUTH,null); // some other provider might fail
-                            key = sshAuth.getKey();
-                        } else {
-                            pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
-                                    CommonUtils.getPBSJobManager(installedParentPath));
-                        }
-                        List<Cluster> pbsClusters = null;
-                        if (!(clusters.containsKey(key))) {
-                            pbsClusters = new ArrayList<Cluster>();
-                        } else {
-                            pbsClusters = clusters.get(key);
-                        }
-                        pbsClusters.add(pbsCluster);
-                        clusters.put(key, pbsClusters);
+                    } catch (Exception e) {
+                        e.printStackTrace();  // TODO: log properly or rethrow instead of printing the stack trace
                     }
+                    sshSecurityContext.setPbsCluster(pbsCluster);
+                    jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT, sshSecurityContext);
                 }
-            } catch (Exception e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+            } catch (AppCatalogException e) {
+                throw new GFacException("Error while getting SSH Submission object from app catalog", e);
             }
-            sshSecurityContext.setPbsCluster(pbsCluster);
-            jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT, sshSecurityContext);
         }
     }
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext,
-                                                    ApplicationDeploymentDescriptionType app, Cluster cluster) {
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
         JobDescriptor jobDescriptor = new JobDescriptor();
         // this is common for any application descriptor
         jobDescriptor.setCallBackIp(ServerSettings.getIp());
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(app.getInputDataDirectory());
-        jobDescriptor.setOutputDirectory(app.getOutputDataDirectory());
-        jobDescriptor.setExecutablePath(app.getExecutableLocation());
-        jobDescriptor.setStandardOutFile(app.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(app.getStandardError());
+        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
+        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
+        jobDescriptor.setExecutablePath(jobExecutionContext.getApplicationContext()
+                .getApplicationDeploymentDescription().getExecutablePath());
+        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
+        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
         int i = random.nextInt(Integer.MAX_VALUE);
         jobDescriptor.setJobName(String.valueOf(i + 99999999));
-        jobDescriptor.setWorkingDirectory(app.getStaticWorkingDirectory());
-
-
+        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
         List<String> inputValues = new ArrayList<String>();
         MessageContext input = jobExecutionContext.getInMessageContext();
         Map<String, Object> inputs = input.getParameters();
@@ -194,51 +208,6 @@ public class GFACSSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
-        // this part will fill out the hpcApplicationDescriptor
-        if (app instanceof HpcApplicationDeploymentType) {
-            HpcApplicationDeploymentType applicationDeploymentType
-                    = (HpcApplicationDeploymentType) app;
-            jobDescriptor.setUserName(((GSISSHAbstractCluster) cluster).getServerInfo().getUserName());
-            jobDescriptor.setShellName("/bin/bash");
-            jobDescriptor.setAllEnvExport(true);
-            jobDescriptor.setMailOptions("n");
-            jobDescriptor.setNodes(applicationDeploymentType.getNodeCount());
-            jobDescriptor.setProcessesPerNode(applicationDeploymentType.getProcessorsPerNode());
-            jobDescriptor.setMaxWallTime(String.valueOf(applicationDeploymentType.getMaxWallTime()));
-            jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
-            jobDescriptor.setCPUCount(applicationDeploymentType.getCpuCount());
-            if (applicationDeploymentType.getProjectAccount() != null) {
-                if (applicationDeploymentType.getProjectAccount().getProjectAccountNumber() != null) {
-                    jobDescriptor.setAcountString(applicationDeploymentType.getProjectAccount().getProjectAccountNumber());
-                }
-            }
-            if (applicationDeploymentType.getQueue() != null) {
-                if (applicationDeploymentType.getQueue().getQueueName() != null) {
-                    jobDescriptor.setQueueName(applicationDeploymentType.getQueue().getQueueName());
-                }
-            }
-            jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            if (taskData != null && taskData.isSetTaskScheduling()) {
-                ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
-                if (computionnalResource.getNodeCount() > 0) {
-                    jobDescriptor.setNodes(computionnalResource.getNodeCount());
-                }
-                if (computionnalResource.getComputationalProjectAccount() != null) {
-                    jobDescriptor.setAcountString(computionnalResource.getComputationalProjectAccount());
-                }
-                if (computionnalResource.getQueueName() != null) {
-                    jobDescriptor.setQueueName(computionnalResource.getQueueName());
-                }
-                if (computionnalResource.getTotalCPUCount() > 0) {
-                    jobDescriptor.setProcessesPerNode(computionnalResource.getTotalCPUCount());
-                }
-                if (computionnalResource.getWallTimeLimit() > 0) {
-                    jobDescriptor.setMaxWallTime(String.valueOf(computionnalResource.getWallTimeLimit()));
-                }
-            }
-
-        }
         return jobDescriptor;
     }
 

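The cluster-caching logic added to GFACSSHUtils above keeps a map from a connection key to a list of live PBSCluster objects, picks one at random, double-checks it with a cheap remote call (listDirectory) because isConnected() alone is unreliable, and recreates the connection when the probe fails. Below is a minimal, self-contained sketch of that reuse-or-recreate pattern; RemoteCluster and ClusterFactory are hypothetical stand-ins for the Airavata PBSCluster types and are not part of the GFac code.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

// Hypothetical stand-ins for PBSCluster and its construction; not Airavata APIs.
interface RemoteCluster {
    boolean isConnected();
    void listDirectory(String path) throws Exception; // cheap liveness probe
}

interface ClusterFactory {
    RemoteCluster create(String key) throws Exception;
}

public class ClusterPool {
    // Note: not synchronized; real code would need to guard concurrent access.
    private final Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
    private final Random random = new Random();

    public RemoteCluster acquire(String key, ClusterFactory factory) throws Exception {
        List<RemoteCluster> pooled = clusters.get(key);
        if (pooled != null && !pooled.isEmpty()) {
            RemoteCluster candidate = pooled.get(random.nextInt(pooled.size()));
            try {
                // isConnected() is not reliable on its own, so issue a real remote call.
                if (candidate.isConnected()) {
                    candidate.listDirectory("~/");
                    return candidate;
                }
            } catch (Exception probeFailed) {
                // fall through and recreate below
            }
            pooled.remove(candidate); // drop the stale connection
        }
        RemoteCluster fresh = factory.create(key);
        List<RemoteCluster> list = clusters.get(key);
        if (list == null) {
            list = new ArrayList<RemoteCluster>();
            clusters.put(key, list);
        }
        list.add(fresh);
        return fresh;
    }
}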
http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
index e84848c..c65f386 100644
--- a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
+++ b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
@@ -1,252 +1,252 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.SecurityContext;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
-import org.apache.airavata.schemas.gfac.*;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-public class BigRed2TestWithSSHAuth {
-    private JobExecutionContext jobExecutionContext;
-
-    private String userName;
-    private String password;
-    private String passPhrase;
-    private String hostName;
-    private String workingDirectory;
-    private String privateKeyPath;
-    private String publicKeyPath;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-
-        System.out.println("Test case name " + this.getClass().getName());
-//        System.setProperty("ssh.host","bigred2.uits.iu.edu");        //default ssh host
-//        System.setProperty("ssh.user", "lginnali");
-//        System.setProperty("ssh.private.key.path", "/Users/lahirugunathilake/.ssh/id_dsa");
-//        System.setProperty("ssh.public.key.path", "/Users/lahirugunathilake/.ssh/id_dsa.pub");
-//        System.setProperty("ssh.working.directory", "/tmp");
-
-        this.hostName = "bigred2.uits.iu.edu";
-        this.hostName = System.getProperty("ssh.host");
-        this.userName = System.getProperty("ssh.username");
-        this.password = System.getProperty("ssh.password");
-        this.privateKeyPath = System.getProperty("private.ssh.key");
-        this.publicKeyPath = System.getProperty("public.ssh.key");
-        this.passPhrase = System.getProperty("ssh.keypass");
-        this.workingDirectory = System.getProperty("ssh.working.directory");
-
-
-         if (this.userName == null
-                || (this.password==null && (this.publicKeyPath == null || this.privateKeyPath == null)) || this.workingDirectory == null) {
-            System.out.println("########### In order to test you have to either username password or private,public keys");
-            System.out.println("Use -Dssh.username=xxx -Dssh.password=yyy -Dssh.keypass=zzz " +
-                    "-Dprivate.ssh.key -Dpublic.ssh.key -Dssh.working.directory ");
-        }
-        URL resource = BigRed2TestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        //have to set InFlwo Handlers and outFlowHandlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
-
-        /*
-        * Host
-        */
-        HostDescription host = new HostDescription(SSHHostType.type);
-        host.getType().setHostAddress(hostName);
-        host.getType().setHostName(hostName);
-        ((SSHHostType)host.getType()).setHpcResource(true);
-        /*
-        * App
-        */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-        * Use bat file if it is compiled on Windows
-        */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-        * Default tmp location
-        */
-        String tempDir = "/tmp";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-        app.setMaxWallTime(5);
-        app.setJobSubmitterCommand("aprun -n 1");
-        app.setInstalledParentPath("/opt/torque/torque-4.2.3.1/bin/");
-
-        /*
-        * Service
-        */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-
-                .size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext(app));
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
-        jobExecutionContext.setTaskData(new TaskDetails("11323"));
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-
-    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
-         try {
-
-        AuthenticationInfo authenticationInfo = null;
-        if (password != null) {
-            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-        } else {
-            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-                    this.passPhrase);
-        }
-        // Server info
-        ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
-
-        Cluster pbsCluster = null;
-        SSHSecurityContext sshSecurityContext = null;
-
-            JobManagerConfiguration pbsJobManager = CommonUtils.getPBSJobManager(app.getInstalledParentPath());
-            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, pbsJobManager);
-
-
-            sshSecurityContext = new SSHSecurityContext();
-            sshSecurityContext.setPbsCluster(pbsCluster);
-            sshSecurityContext.setUsername(userName);
-            sshSecurityContext.setKeyPass(passPhrase);
-            sshSecurityContext.setPrivateKeyLoc(privateKeyPath);
-             return sshSecurityContext;
-        } catch (SSHApiException e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-        }
-        return null;
-    }
-
-    @Test
-    public void testSSHProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobDescription());
-        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobID());
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.SecurityContext;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
+//import org.apache.airavata.schemas.gfac.*;
+//import org.testng.annotations.BeforeClass;
+//import org.testng.annotations.Test;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//public class BigRed2TestWithSSHAuth {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    private String userName;
+//    private String password;
+//    private String passPhrase;
+//    private String hostName;
+//    private String workingDirectory;
+//    private String privateKeyPath;
+//    private String publicKeyPath;
+//
+//    @BeforeClass
+//    public void setUp() throws Exception {
+//
+//        System.out.println("Test case name " + this.getClass().getName());
+////        System.setProperty("ssh.host","bigred2.uits.iu.edu");        //default ssh host
+////        System.setProperty("ssh.user", "lginnali");
+////        System.setProperty("ssh.private.key.path", "/Users/lahirugunathilake/.ssh/id_dsa");
+////        System.setProperty("ssh.public.key.path", "/Users/lahirugunathilake/.ssh/id_dsa.pub");
+////        System.setProperty("ssh.working.directory", "/tmp");
+//
+//        this.hostName = "bigred2.uits.iu.edu";
+//        this.hostName = System.getProperty("ssh.host");
+//        this.userName = System.getProperty("ssh.username");
+//        this.password = System.getProperty("ssh.password");
+//        this.privateKeyPath = System.getProperty("private.ssh.key");
+//        this.publicKeyPath = System.getProperty("public.ssh.key");
+//        this.passPhrase = System.getProperty("ssh.keypass");
+//        this.workingDirectory = System.getProperty("ssh.working.directory");
+//
+//
+//         if (this.userName == null
+//                || (this.password==null && (this.publicKeyPath == null || this.privateKeyPath == null)) || this.workingDirectory == null) {
+//            System.out.println("########### In order to test you have to either username password or private,public keys");
+//            System.out.println("Use -Dssh.username=xxx -Dssh.password=yyy -Dssh.keypass=zzz " +
+//                    "-Dprivate.ssh.key -Dpublic.ssh.key -Dssh.working.directory ");
+//        }
+//        URL resource = BigRed2TestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+////        gFacConfiguration.setMyProxyLifeCycle(3600);
+////        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
+////        gFacConfiguration.setMyProxyUser("*****");
+////        gFacConfiguration.setMyProxyPassphrase("*****");
+////        gFacConfiguration.setTrustedCertLocation("./certificates");
+////        //have to set InFlwo Handlers and outFlowHandlers
+////        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
+////        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
+//
+//        /*
+//        * Host
+//        */
+//        HostDescription host = new HostDescription(SSHHostType.type);
+//        host.getType().setHostAddress(hostName);
+//        host.getType().setHostName(hostName);
+//        ((SSHHostType)host.getType()).setHpcResource(true);
+//        /*
+//        * App
+//        */
+//        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
+//        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//
+//        app.setCpuCount(1);
+//        app.setJobType(JobTypeType.SERIAL);
+//        app.setNodeCount(1);
+//        app.setProcessorsPerNode(1);
+//
+//        /*
+//        * Use bat file if it is compiled on Windows
+//        */
+//        app.setExecutableLocation("/bin/echo");
+//
+//        /*
+//        * Default tmp location
+//        */
+//        String tempDir = "/tmp";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = tempDir + File.separator
+//                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
+//
+//        System.out.println(tempDir);
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "inputData");
+//        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
+//        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
+//        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
+//        app.setMaxWallTime(5);
+//        app.setJobSubmitterCommand("aprun -n 1");
+//        app.setInstalledParentPath("/opt/torque/torque-4.2.3.1/bin/");
+//
+//        /*
+//        * Service
+//        */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//
+//                .size()]);
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        // Adding security context
+//        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext(app));
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//        applicationContext.setHostDescription(host);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
+//        jobExecutionContext.setTaskData(new TaskDetails("11323"));
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//
+//    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
+//         try {
+//
+//        AuthenticationInfo authenticationInfo = null;
+//        if (password != null) {
+//            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
+//        } else {
+//            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
+//                    this.passPhrase);
+//        }
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
+//
+//        Cluster pbsCluster = null;
+//        SSHSecurityContext sshSecurityContext = null;
+//
+//            JobManagerConfiguration pbsJobManager = CommonUtils.getPBSJobManager(app.getInstalledParentPath());
+//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, pbsJobManager);
+//
+//
+//            sshSecurityContext = new SSHSecurityContext();
+//            sshSecurityContext.setPbsCluster(pbsCluster);
+//            sshSecurityContext.setUsername(userName);
+//            sshSecurityContext.setKeyPass(passPhrase);
+//            sshSecurityContext.setPrivateKeyLoc(privateKeyPath);
+//             return sshSecurityContext;
+//        } catch (SSHApiException e) {
+//            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//        }
+//        return null;
+//    }
+//
+//    @Test
+//    public void testSSHProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobDescription());
+//        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobID());
+//    }
+//
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
index 5cb1200..b115b6c 100644
--- a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
+++ b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
@@ -1,172 +1,172 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class SSHProviderTestWithSSHAuth {
-	private JobExecutionContext jobExecutionContext;
-    @Before
-    public void setUp() throws Exception {
-
-    	URL resource = SSHProviderTestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
-//        gFacConfiguration.s
-        //have to set InFlwo Handlers and outFlowHandlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription(SSHHostType.type);
-        host.getType().setHostName("bigred");
-        host.getType().setHostAddress("bigred2.uits.iu.edu");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoSSH");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-         * Job location
-        */
-        String tempDir = "/tmp";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "EchoSSH" + "_" + date + "_" + UUID.randomUUID();
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("EchoSSH");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-
-        // Add security context
-        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext());
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.MappingFactory;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.SSHHostType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.apache.commons.lang.SystemUtils;
+//import org.junit.Assert;
+//import org.junit.Before;
+//import org.junit.Test;
+//
+//public class SSHProviderTestWithSSHAuth {
+//	private JobExecutionContext jobExecutionContext;
+//    @Before
+//    public void setUp() throws Exception {
+//
+//    	URL resource = SSHProviderTestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
+////        gFacConfiguration.s
+//        //have to set InFlwo Handlers and outFlowHandlers
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        HostDescription host = new HostDescription(SSHHostType.type);
+//        host.getType().setHostName("bigred");
+//        host.getType().setHostAddress("bigred2.uits.iu.edu");
+//        applicationContext.setHostDescription(host);
+//        /*
+//           * App
+//           */
+//        ApplicationDescription appDesc = new ApplicationDescription();
+//        ApplicationDeploymentDescriptionType app = appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoSSH");
+//        app.setApplicationName(name);
+//
+//        /*
+//           * Use bat file if it is compiled on Windows
+//           */
+//        if (SystemUtils.IS_OS_WINDOWS) {
+//            URL url = this.getClass().getClassLoader().getResource("echo.bat");
+//            app.setExecutableLocation(url.getFile());
+//        } else {
+//            //for unix and Mac
+//            app.setExecutableLocation("/bin/echo");
+//        }
+//
+//        /*
+//         * Job location
+//        */
+//        String tempDir = "/tmp";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = tempDir + File.separator
+//                + "EchoSSH" + "_" + date + "_" + UUID.randomUUID();
+//
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "input");
+//        app.setOutputDataDirectory(tempDir + File.separator + "output");
+//        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
+//        app.setStandardError(tempDir + File.separator + "echo.stderr");
+//
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//
+//        /*
+//           * Service
+//           */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("EchoSSH");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//                .size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//
+//        // Add security context
+//        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext());
+//        /*
+//        * Host
+//        */
+//        applicationContext.setServiceDescription(serv);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
 //		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-	private SSHSecurityContext getSecurityContext() {
-		SSHSecurityContext context = new SSHSecurityContext();
-        context.setUsername("lginnali");
-        context.setPrivateKeyLoc("~/.ssh/id_dsa");
-        context.setKeyPass("i want to be free");
-		return context;
-	}
-
-    @Test
-    public void testLocalProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-}
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//	private SSHSecurityContext getSecurityContext() {
+//		SSHSecurityContext context = new SSHSecurityContext();
+//        context.setUsername("lginnali");
+//        context.setPrivateKeyLoc("~/.ssh/id_dsa");
+//        context.setKeyPass("i want to be free");
+//		return context;
+//	}
+//
+//    @Test
+//    public void testLocalProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+//        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
+//    }
+//}


[05/50] [abbrv] airavata git commit: Merge branch 'gfac_appcatalog_int' of https://git-wip-us.apache.org/repos/asf/airavata into gfac_appcatalog_int

Posted by ch...@apache.org.
Merge branch 'gfac_appcatalog_int' of https://git-wip-us.apache.org/repos/asf/airavata into gfac_appcatalog_int


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/bb4fe122
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/bb4fe122
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/bb4fe122

Branch: refs/heads/master
Commit: bb4fe1226ae1a170f59e3992116bcea70f19c3f6
Parents: ad2b1d3 04f09e7
Author: shamrath <sh...@gmail.com>
Authored: Fri Oct 31 17:41:42 2014 -0400
Committer: shamrath <sh...@gmail.com>
Committed: Fri Oct 31 17:41:42 2014 -0400

----------------------------------------------------------------------
 .../gfac/bes/handlers/AbstractSMSHandler.java   |  74 ++--
 .../gfac/bes/provider/impl/BESProvider.java     | 378 +++++++++----------
 .../bes/security/UNICORESecurityContext.java    |   4 +-
 .../gfac/bes/utils/ApplicationProcessor.java    | 212 ++++-------
 .../airavata/gfac/core/utils/GFacUtils.java     |  23 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |  15 +-
 6 files changed, 306 insertions(+), 400 deletions(-)
----------------------------------------------------------------------



[44/50] [abbrv] airavata git commit: fixing class cast exception that occurred due to an ActualParameter instance

Posted by ch...@apache.org.
fixing class cast exception that occurred due to an ActualParameter instance


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/e5413290
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/e5413290
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/e5413290

Branch: refs/heads/master
Commit: e54132907b81eb0daf479ea0986f7ff7a9be0e58
Parents: f09c742
Author: chathuriw <ka...@gmail.com>
Authored: Fri Nov 7 10:58:02 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Fri Nov 7 10:58:02 2014 -0500

----------------------------------------------------------------------
 .../gfac/gsissh/util/GFACGSISSHUtils.java       | 20 +++-----------------
 1 file changed, 3 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/e5413290/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index 0a521b5..2adc013 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -45,6 +45,7 @@ import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
@@ -185,23 +186,8 @@ public class GFACGSISSHUtils {
         Map<String, Object> inputs = input.getParameters();
         Set<String> keys = inputs.keySet();
         for (String paramName : keys) {
-            ActualParameter actualParameter = (ActualParameter) inputs.get(paramName);
-            if ("URIArray".equals(actualParameter.getType().getType().toString()) || "StringArray".equals(actualParameter.getType().getType().toString())
-                    || "FileArray".equals(actualParameter.getType().getType().toString())) {
-                String[] values = null;
-                if (actualParameter.getType() instanceof URIArrayType) {
-                    values = ((URIArrayType) actualParameter.getType()).getValueArray();
-                } else if (actualParameter.getType() instanceof StringArrayType) {
-                    values = ((StringArrayType) actualParameter.getType()).getValueArray();
-                } else if (actualParameter.getType() instanceof FileArrayType) {
-                    values = ((FileArrayType) actualParameter.getType()).getValueArray();
-                }
-                String value = StringUtil.createDelimiteredString(values, " ");
-                inputValues.add(value);
-            } else {
-                String paramValue = MappingFactory.toString(actualParameter);
-                inputValues.add(paramValue);
-            }
+            InputDataObjectType inputDataObjectType = (InputDataObjectType) inputs.get(paramName);
+            inputValues.add(inputDataObjectType.getValue());
         }
         jobDescriptor.setInputValues(inputValues);
 


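The hunk above swaps the old ActualParameter-based extraction (which now throws a ClassCastException because the in-message parameter map carries InputDataObjectType values) for a direct read of InputDataObjectType.getValue(). A small illustrative helper in the same spirit is sketched below; only the InputDataObjectType class and its getValue() accessor are taken from the diff, while the defensive instanceof check and the helper class itself are not part of GFACGSISSHUtils.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;

// Illustrative helper, not the actual GFACGSISSHUtils code: reads input values
// from a heterogeneous Object-valued map and fails loudly on unexpected types
// instead of performing a blind cast (the source of the original ClassCastException).
public class InputValueExtractor {

    public static List<String> extractValues(Map<String, Object> inputs) {
        List<String> inputValues = new ArrayList<String>();
        for (Object value : inputs.values()) {
            if (value instanceof InputDataObjectType) {
                inputValues.add(((InputDataObjectType) value).getValue());
            } else {
                throw new IllegalArgumentException("Unsupported input parameter type: "
                        + (value == null ? "null" : value.getClass().getName()));
            }
        }
        return inputValues;
    }
}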
[32/50] [abbrv] airavata git commit: Removed the DataObjectType thrift struct from experimentModel and replaced it with the Input and Output DataObjectType structs defined in the applicationInterfaceModel thrift file

Posted by ch...@apache.org.
Removed the DataObjectType thrift struct from experimentModel and replaced it with the Input and Output DataObjectType structs defined in the applicationInterfaceModel thrift file


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/198de990
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/198de990
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/198de990

Branch: refs/heads/master
Commit: 198de990384ca3b3974222f22847d2a431fdd686
Parents: 95354ea
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 15:47:57 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 15:47:57 2014 -0500

----------------------------------------------------------------------
 .../java/org/apache/airavata/api/Airavata.java  |  58 +--
 .../main/resources/lib/airavata/Airavata.cpp    |   6 +-
 .../src/main/resources/lib/airavata/Airavata.h  |  16 +-
 .../lib/airavata/Airavata_server.skeleton.cpp   |   2 +-
 .../lib/airavata/experimentModel_types.cpp      | 521 +++++++------------
 .../lib/airavata/experimentModel_types.h        | 124 +----
 .../lib/airavata/messagingEvents_types.cpp      |   6 +-
 .../lib/airavata/messagingEvents_types.h        |   9 +-
 .../resources/lib/Airavata/API/Airavata.php     |   4 +-
 .../Airavata/Model/Messaging/Event/Types.php    |   4 +-
 .../Model/Workspace/Experiment/Types.php        | 171 +-----
 .../client/samples/CreateLaunchExperiment.java  |   2 +-
 .../CreateLaunchExperimentForLocalhost.java     | 269 ++++++++++
 .../messaging/event/TaskOutputChangeEvent.java  |  40 +-
 .../model/workspace/experiment/Experiment.java  |  76 +--
 .../model/workspace/experiment/JobDetails.java  |  11 +-
 .../model/workspace/experiment/TaskDetails.java |  76 +--
 .../experiment/WorkflowNodeDetails.java         |  76 +--
 .../airavataAPI.thrift                          |   2 +-
 .../experimentModel.thrift                      |  32 +-
 .../messagingEvents.thrift                      |   3 +-
 21 files changed, 699 insertions(+), 809 deletions(-)
----------------------------------------------------------------------
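Because of this change, API clients that call getExperimentOutputs now receive application-interface OutputDataObjectType instances instead of the old experiment-model DataObjectType, as the generated Airavata.java diff below shows. A hedged usage sketch follows; it assumes an already-connected generated Airavata.Client Thrift stub and assumes OutputDataObjectType exposes getName()/getValue() accessors, and it is not taken verbatim from the Airavata samples.

import java.util.List;

import org.apache.airavata.api.Airavata;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

// Usage sketch only: connection setup is omitted and the getName()/getValue()
// accessors on OutputDataObjectType are assumed from the application interface model.
public class ExperimentOutputPrinter {

    public static void printOutputs(Airavata.Client client, String experimentId) throws Exception {
        // After this change the call returns application-interface output objects
        // instead of the removed experiment-model DataObjectType.
        List<OutputDataObjectType> outputs = client.getExperimentOutputs(experimentId);
        for (OutputDataObjectType output : outputs) {
            System.out.println(output.getName() + " = " + output.getValue());
        }
    }
}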


http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-api-stubs/src/main/java/org/apache/airavata/api/Airavata.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-api-stubs/src/main/java/org/apache/airavata/api/Airavata.java b/airavata-api/airavata-api-stubs/src/main/java/org/apache/airavata/api/Airavata.java
index 720173c..d6f2c44 100644
--- a/airavata-api/airavata-api-stubs/src/main/java/org/apache/airavata/api/Airavata.java
+++ b/airavata-api/airavata-api-stubs/src/main/java/org/apache/airavata/api/Airavata.java
@@ -358,7 +358,7 @@ import org.slf4j.LoggerFactory;
 
     public org.apache.airavata.model.workspace.experiment.ExperimentStatus getExperimentStatus(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException;
 
-    public List<org.apache.airavata.model.workspace.experiment.DataObjectType> getExperimentOutputs(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException;
+    public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getExperimentOutputs(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException;
 
     public Map<String,org.apache.airavata.model.workspace.experiment.JobStatus> getJobStatuses(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException;
 
@@ -2335,7 +2335,7 @@ import org.slf4j.LoggerFactory;
       throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getExperimentStatus failed: unknown result");
     }
 
-    public List<org.apache.airavata.model.workspace.experiment.DataObjectType> getExperimentOutputs(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException
+    public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getExperimentOutputs(String airavataExperimentId) throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException
     {
       send_getExperimentOutputs(airavataExperimentId);
       return recv_getExperimentOutputs();
@@ -2348,7 +2348,7 @@ import org.slf4j.LoggerFactory;
       sendBase("getExperimentOutputs", args);
     }
 
-    public List<org.apache.airavata.model.workspace.experiment.DataObjectType> recv_getExperimentOutputs() throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException
+    public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> recv_getExperimentOutputs() throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException
     {
       getExperimentOutputs_result result = new getExperimentOutputs_result();
       receiveBase(result, "getExperimentOutputs");
@@ -5622,7 +5622,7 @@ import org.slf4j.LoggerFactory;
         prot.writeMessageEnd();
       }
 
-      public List<org.apache.airavata.model.workspace.experiment.DataObjectType> getResult() throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException {
+      public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getResult() throws org.apache.airavata.model.error.InvalidRequestException, org.apache.airavata.model.error.ExperimentNotFoundException, org.apache.airavata.model.error.AiravataClientException, org.apache.airavata.model.error.AiravataSystemException, org.apache.thrift.TException {
         if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
           throw new IllegalStateException("Method call not finished!");
         }
@@ -12695,7 +12695,7 @@ import org.slf4j.LoggerFactory;
       }
     }
 
-    public static class getExperimentOutputs<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getExperimentOutputs_args, List<org.apache.airavata.model.workspace.experiment.DataObjectType>> {
+    public static class getExperimentOutputs<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getExperimentOutputs_args, List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>> {
       public getExperimentOutputs() {
         super("getExperimentOutputs");
       }
@@ -12704,10 +12704,10 @@ import org.slf4j.LoggerFactory;
         return new getExperimentOutputs_args();
       }
 
-      public AsyncMethodCallback<List<org.apache.airavata.model.workspace.experiment.DataObjectType>> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+      public AsyncMethodCallback<List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new AsyncMethodCallback<List<org.apache.airavata.model.workspace.experiment.DataObjectType>>() { 
-          public void onComplete(List<org.apache.airavata.model.workspace.experiment.DataObjectType> o) {
+        return new AsyncMethodCallback<List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>>() { 
+          public void onComplete(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> o) {
             getExperimentOutputs_result result = new getExperimentOutputs_result();
             result.success = o;
             try {
@@ -12762,7 +12762,7 @@ import org.slf4j.LoggerFactory;
         return false;
       }
 
-      public void start(I iface, getExperimentOutputs_args args, org.apache.thrift.async.AsyncMethodCallback<List<org.apache.airavata.model.workspace.experiment.DataObjectType>> resultHandler) throws TException {
+      public void start(I iface, getExperimentOutputs_args args, org.apache.thrift.async.AsyncMethodCallback<List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>> resultHandler) throws TException {
         iface.getExperimentOutputs(args.airavataExperimentId,resultHandler);
       }
     }
@@ -41960,7 +41960,7 @@ import org.slf4j.LoggerFactory;
       schemes.put(TupleScheme.class, new getExperimentOutputs_resultTupleSchemeFactory());
     }
 
-    public List<org.apache.airavata.model.workspace.experiment.DataObjectType> success; // required
+    public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> success; // required
     public org.apache.airavata.model.error.InvalidRequestException ire; // required
     public org.apache.airavata.model.error.ExperimentNotFoundException enf; // required
     public org.apache.airavata.model.error.AiravataClientException ace; // required
@@ -42042,7 +42042,7 @@ import org.slf4j.LoggerFactory;
       Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
       tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.workspace.experiment.DataObjectType.class))));
+              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType.class))));
       tmpMap.put(_Fields.IRE, new org.apache.thrift.meta_data.FieldMetaData("ire", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
       tmpMap.put(_Fields.ENF, new org.apache.thrift.meta_data.FieldMetaData("enf", org.apache.thrift.TFieldRequirementType.DEFAULT, 
@@ -42059,7 +42059,7 @@ import org.slf4j.LoggerFactory;
     }
 
     public getExperimentOutputs_result(
-      List<org.apache.airavata.model.workspace.experiment.DataObjectType> success,
+      List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> success,
       org.apache.airavata.model.error.InvalidRequestException ire,
       org.apache.airavata.model.error.ExperimentNotFoundException enf,
       org.apache.airavata.model.error.AiravataClientException ace,
@@ -42078,9 +42078,9 @@ import org.slf4j.LoggerFactory;
      */
     public getExperimentOutputs_result(getExperimentOutputs_result other) {
       if (other.isSetSuccess()) {
-        List<org.apache.airavata.model.workspace.experiment.DataObjectType> __this__success = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(other.success.size());
-        for (org.apache.airavata.model.workspace.experiment.DataObjectType other_element : other.success) {
-          __this__success.add(new org.apache.airavata.model.workspace.experiment.DataObjectType(other_element));
+        List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> __this__success = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(other.success.size());
+        for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType other_element : other.success) {
+          __this__success.add(new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType(other_element));
         }
         this.success = __this__success;
       }
@@ -42115,22 +42115,22 @@ import org.slf4j.LoggerFactory;
       return (this.success == null) ? 0 : this.success.size();
     }
 
-    public java.util.Iterator<org.apache.airavata.model.workspace.experiment.DataObjectType> getSuccessIterator() {
+    public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getSuccessIterator() {
       return (this.success == null) ? null : this.success.iterator();
     }
 
-    public void addToSuccess(org.apache.airavata.model.workspace.experiment.DataObjectType elem) {
+    public void addToSuccess(org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType elem) {
       if (this.success == null) {
-        this.success = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>();
+        this.success = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>();
       }
       this.success.add(elem);
     }
 
-    public List<org.apache.airavata.model.workspace.experiment.DataObjectType> getSuccess() {
+    public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getSuccess() {
       return this.success;
     }
 
-    public getExperimentOutputs_result setSuccess(List<org.apache.airavata.model.workspace.experiment.DataObjectType> success) {
+    public getExperimentOutputs_result setSuccess(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> success) {
       this.success = success;
       return this;
     }
@@ -42252,7 +42252,7 @@ import org.slf4j.LoggerFactory;
         if (value == null) {
           unsetSuccess();
         } else {
-          setSuccess((List<org.apache.airavata.model.workspace.experiment.DataObjectType>)value);
+          setSuccess((List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>)value);
         }
         break;
 
@@ -42563,11 +42563,11 @@ import org.slf4j.LoggerFactory;
               if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
                 {
                   org.apache.thrift.protocol.TList _list80 = iprot.readListBegin();
-                  struct.success = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(_list80.size);
+                  struct.success = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list80.size);
                   for (int _i81 = 0; _i81 < _list80.size; ++_i81)
                   {
-                    org.apache.airavata.model.workspace.experiment.DataObjectType _elem82;
-                    _elem82 = new org.apache.airavata.model.workspace.experiment.DataObjectType();
+                    org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem82;
+                    _elem82 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
                     _elem82.read(iprot);
                     struct.success.add(_elem82);
                   }
@@ -42633,7 +42633,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
-            for (org.apache.airavata.model.workspace.experiment.DataObjectType _iter83 : struct.success)
+            for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter83 : struct.success)
             {
               _iter83.write(oprot);
             }
@@ -42698,7 +42698,7 @@ import org.slf4j.LoggerFactory;
         if (struct.isSetSuccess()) {
           {
             oprot.writeI32(struct.success.size());
-            for (org.apache.airavata.model.workspace.experiment.DataObjectType _iter84 : struct.success)
+            for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter84 : struct.success)
             {
               _iter84.write(oprot);
             }
@@ -42725,11 +42725,11 @@ import org.slf4j.LoggerFactory;
         if (incoming.get(0)) {
           {
             org.apache.thrift.protocol.TList _list85 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-            struct.success = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(_list85.size);
+            struct.success = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list85.size);
             for (int _i86 = 0; _i86 < _list85.size; ++_i86)
             {
-              org.apache.airavata.model.workspace.experiment.DataObjectType _elem87;
-              _elem87 = new org.apache.airavata.model.workspace.experiment.DataObjectType();
+              org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem87;
+              _elem87 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
               _elem87.read(iprot);
               struct.success.add(_elem87);
             }

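The async processor in the diff above now hands implementers a callback typed with the appinterface OutputDataObjectType as well. Below is a minimal server-side sketch of an AsyncIface-style handler under that signature; the setName()/setValue() accessors and the literal output values are assumptions for illustration, not taken from this commit.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
    import org.apache.thrift.TException;
    import org.apache.thrift.async.AsyncMethodCallback;

    public class AsyncOutputsHandlerSketch {
        // Mirrors the signature used by getExperimentOutputs.start() in the diff above.
        public void getExperimentOutputs(String airavataExperimentId,
                AsyncMethodCallback<List<OutputDataObjectType>> resultHandler) throws TException {
            List<OutputDataObjectType> outputs = new ArrayList<OutputDataObjectType>();
            OutputDataObjectType stdout = new OutputDataObjectType();
            stdout.setName("Echoed_Output");      // assumed accessor on the appinterface struct
            stdout.setValue("echo output = hi");  // assumed accessor on the appinterface struct
            outputs.add(stdout);
            // Complete the async call with the new output type.
            resultHandler.onComplete(outputs);
        }
    }
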
http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.cpp
index 063c4c6..59d72e5 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.cpp
@@ -5621,7 +5621,7 @@ uint32_t Airavata_getExperimentOutputs_result::write(::apache::thrift::protocol:
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
-      std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> ::const_iterator _iter116;
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> ::const_iterator _iter116;
       for (_iter116 = this->success.begin(); _iter116 != this->success.end(); ++_iter116)
       {
         xfer += (*_iter116).write(oprot);
@@ -25467,7 +25467,7 @@ void AiravataClient::recv_getExperimentStatus( ::apache::airavata::model::worksp
   throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "getExperimentStatus failed: unknown result");
 }
 
-void AiravataClient::getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return, const std::string& airavataExperimentId)
+void AiravataClient::getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return, const std::string& airavataExperimentId)
 {
   send_getExperimentOutputs(airavataExperimentId);
   recv_getExperimentOutputs(_return);
@@ -25487,7 +25487,7 @@ void AiravataClient::send_getExperimentOutputs(const std::string& airavataExperi
   oprot_->getTransport()->flush();
 }
 
-void AiravataClient::recv_getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return)
+void AiravataClient::recv_getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return)
 {
 
   int32_t rseqid = 0;

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.h
index b68d927..31e2d25 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata.h
@@ -54,7 +54,7 @@ class AiravataIf {
   virtual bool validateExperiment(const std::string& airavataExperimentId) = 0;
   virtual void launchExperiment(const std::string& airavataExperimentId, const std::string& airavataCredStoreToken) = 0;
   virtual void getExperimentStatus( ::apache::airavata::model::workspace::experiment::ExperimentStatus& _return, const std::string& airavataExperimentId) = 0;
-  virtual void getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return, const std::string& airavataExperimentId) = 0;
+  virtual void getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return, const std::string& airavataExperimentId) = 0;
   virtual void getJobStatuses(std::map<std::string,  ::apache::airavata::model::workspace::experiment::JobStatus> & _return, const std::string& airavataExperimentId) = 0;
   virtual void getJobDetails(std::vector< ::apache::airavata::model::workspace::experiment::JobDetails> & _return, const std::string& airavataExperimentId) = 0;
   virtual void getDataTransferDetails(std::vector< ::apache::airavata::model::workspace::experiment::DataTransferDetails> & _return, const std::string& airavataExperimentId) = 0;
@@ -227,7 +227,7 @@ class AiravataNull : virtual public AiravataIf {
   void getExperimentStatus( ::apache::airavata::model::workspace::experiment::ExperimentStatus& /* _return */, const std::string& /* airavataExperimentId */) {
     return;
   }
-  void getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & /* _return */, const std::string& /* airavataExperimentId */) {
+  void getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & /* _return */, const std::string& /* airavataExperimentId */) {
     return;
   }
   void getJobStatuses(std::map<std::string,  ::apache::airavata::model::workspace::experiment::JobStatus> & /* _return */, const std::string& /* airavataExperimentId */) {
@@ -3511,7 +3511,7 @@ class Airavata_getExperimentOutputs_result {
 
   virtual ~Airavata_getExperimentOutputs_result() throw() {}
 
-  std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType>  success;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType>  success;
    ::apache::airavata::api::error::InvalidRequestException ire;
    ::apache::airavata::api::error::ExperimentNotFoundException enf;
    ::apache::airavata::api::error::AiravataClientException ace;
@@ -3519,7 +3519,7 @@ class Airavata_getExperimentOutputs_result {
 
   _Airavata_getExperimentOutputs_result__isset __isset;
 
-  void __set_success(const std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & val) {
+  void __set_success(const std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & val) {
     success = val;
   }
 
@@ -3579,7 +3579,7 @@ class Airavata_getExperimentOutputs_presult {
 
   virtual ~Airavata_getExperimentOutputs_presult() throw() {}
 
-  std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> * success;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> * success;
    ::apache::airavata::api::error::InvalidRequestException ire;
    ::apache::airavata::api::error::ExperimentNotFoundException enf;
    ::apache::airavata::api::error::AiravataClientException ace;
@@ -14014,9 +14014,9 @@ class AiravataClient : virtual public AiravataIf {
   void getExperimentStatus( ::apache::airavata::model::workspace::experiment::ExperimentStatus& _return, const std::string& airavataExperimentId);
   void send_getExperimentStatus(const std::string& airavataExperimentId);
   void recv_getExperimentStatus( ::apache::airavata::model::workspace::experiment::ExperimentStatus& _return);
-  void getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return, const std::string& airavataExperimentId);
+  void getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return, const std::string& airavataExperimentId);
   void send_getExperimentOutputs(const std::string& airavataExperimentId);
-  void recv_getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return);
+  void recv_getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return);
   void getJobStatuses(std::map<std::string,  ::apache::airavata::model::workspace::experiment::JobStatus> & _return, const std::string& airavataExperimentId);
   void send_getJobStatuses(const std::string& airavataExperimentId);
   void recv_getJobStatuses(std::map<std::string,  ::apache::airavata::model::workspace::experiment::JobStatus> & _return);
@@ -14703,7 +14703,7 @@ class AiravataMultiface : virtual public AiravataIf {
     return;
   }
 
-  void getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return, const std::string& airavataExperimentId) {
+  void getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return, const std::string& airavataExperimentId) {
     size_t sz = ifaces_.size();
     size_t i = 0;
     for (; i < (sz - 1); ++i) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata_server.skeleton.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata_server.skeleton.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata_server.skeleton.cpp
index ce06c45..1dc8277 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata_server.skeleton.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/Airavata_server.skeleton.cpp
@@ -149,7 +149,7 @@ class AiravataHandler : virtual public AiravataIf {
     printf("getExperimentStatus\n");
   }
 
-  void getExperimentOutputs(std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & _return, const std::string& airavataExperimentId) {
+  void getExperimentOutputs(std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & _return, const std::string& airavataExperimentId) {
     // Your implementation goes here
     printf("getExperimentOutputs\n");
   }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.cpp
index fdf070d..18a2674 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.cpp
@@ -221,22 +221,6 @@ const char* _kCorrectiveActionNames[] = {
 };
 const std::map<int, const char*> _CorrectiveAction_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kCorrectiveActionValues, _kCorrectiveActionNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
-int _kDataTypeValues[] = {
-  DataType::STRING,
-  DataType::INTEGER,
-  DataType::URI,
-  DataType::STDOUT,
-  DataType::STDERR
-};
-const char* _kDataTypeNames[] = {
-  "STRING",
-  "INTEGER",
-  "URI",
-  "STDOUT",
-  "STDERR"
-};
-const std::map<int, const char*> _DataType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(5, _kDataTypeValues, _kDataTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
-
 int _kExecutionUnitValues[] = {
   ExecutionUnit::INPUT,
   ExecutionUnit::APPLICATION,
@@ -735,115 +719,6 @@ void swap(ApplicationStatus &a, ApplicationStatus &b) {
   swap(a.__isset, b.__isset);
 }
 
-const char* DataObjectType::ascii_fingerprint = "544FBB8031AE070AEEB7AC0E4A90E43C";
-const uint8_t DataObjectType::binary_fingerprint[16] = {0x54,0x4F,0xBB,0x80,0x31,0xAE,0x07,0x0A,0xEE,0xB7,0xAC,0x0E,0x4A,0x90,0xE4,0x3C};
-
-uint32_t DataObjectType::read(::apache::thrift::protocol::TProtocol* iprot) {
-
-  uint32_t xfer = 0;
-  std::string fname;
-  ::apache::thrift::protocol::TType ftype;
-  int16_t fid;
-
-  xfer += iprot->readStructBegin(fname);
-
-  using ::apache::thrift::protocol::TProtocolException;
-
-  bool isset_key = false;
-
-  while (true)
-  {
-    xfer += iprot->readFieldBegin(fname, ftype, fid);
-    if (ftype == ::apache::thrift::protocol::T_STOP) {
-      break;
-    }
-    switch (fid)
-    {
-      case 1:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->key);
-          isset_key = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      case 2:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->value);
-          this->__isset.value = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      case 3:
-        if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast5;
-          xfer += iprot->readI32(ecast5);
-          this->type = (DataType::type)ecast5;
-          this->__isset.type = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      case 4:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->metaData);
-          this->__isset.metaData = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      default:
-        xfer += iprot->skip(ftype);
-        break;
-    }
-    xfer += iprot->readFieldEnd();
-  }
-
-  xfer += iprot->readStructEnd();
-
-  if (!isset_key)
-    throw TProtocolException(TProtocolException::INVALID_DATA);
-  return xfer;
-}
-
-uint32_t DataObjectType::write(::apache::thrift::protocol::TProtocol* oprot) const {
-  uint32_t xfer = 0;
-  xfer += oprot->writeStructBegin("DataObjectType");
-
-  xfer += oprot->writeFieldBegin("key", ::apache::thrift::protocol::T_STRING, 1);
-  xfer += oprot->writeString(this->key);
-  xfer += oprot->writeFieldEnd();
-
-  if (this->__isset.value) {
-    xfer += oprot->writeFieldBegin("value", ::apache::thrift::protocol::T_STRING, 2);
-    xfer += oprot->writeString(this->value);
-    xfer += oprot->writeFieldEnd();
-  }
-  if (this->__isset.type) {
-    xfer += oprot->writeFieldBegin("type", ::apache::thrift::protocol::T_I32, 3);
-    xfer += oprot->writeI32((int32_t)this->type);
-    xfer += oprot->writeFieldEnd();
-  }
-  if (this->__isset.metaData) {
-    xfer += oprot->writeFieldBegin("metaData", ::apache::thrift::protocol::T_STRING, 4);
-    xfer += oprot->writeString(this->metaData);
-    xfer += oprot->writeFieldEnd();
-  }
-  xfer += oprot->writeFieldStop();
-  xfer += oprot->writeStructEnd();
-  return xfer;
-}
-
-void swap(DataObjectType &a, DataObjectType &b) {
-  using ::std::swap;
-  swap(a.key, b.key);
-  swap(a.value, b.value);
-  swap(a.type, b.type);
-  swap(a.metaData, b.metaData);
-  swap(a.__isset, b.__isset);
-}
-
 const char* ComputationalResourceScheduling::ascii_fingerprint = "32AC7AC41AD3753A7224A32FD6EB4B5D";
 const uint8_t ComputationalResourceScheduling::binary_fingerprint[16] = {0x32,0xAC,0x7A,0xC4,0x1A,0xD3,0x75,0x3A,0x72,0x24,0xA3,0x2F,0xD6,0xEB,0x4B,0x5D};
 
@@ -1529,9 +1404,9 @@ uint32_t ErrorDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 5:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast6;
-          xfer += iprot->readI32(ecast6);
-          this->errorCategory = (ErrorCategory::type)ecast6;
+          int32_t ecast5;
+          xfer += iprot->readI32(ecast5);
+          this->errorCategory = (ErrorCategory::type)ecast5;
           this->__isset.errorCategory = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1547,9 +1422,9 @@ uint32_t ErrorDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 7:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast7;
-          xfer += iprot->readI32(ecast7);
-          this->correctiveAction = (CorrectiveAction::type)ecast7;
+          int32_t ecast6;
+          xfer += iprot->readI32(ecast6);
+          this->correctiveAction = (CorrectiveAction::type)ecast6;
           this->__isset.correctiveAction = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1557,9 +1432,9 @@ uint32_t ErrorDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 8:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast8;
-          xfer += iprot->readI32(ecast8);
-          this->actionableGroup = (ActionableGroup::type)ecast8;
+          int32_t ecast7;
+          xfer += iprot->readI32(ecast7);
+          this->actionableGroup = (ActionableGroup::type)ecast7;
           this->__isset.actionableGroup = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1569,14 +1444,14 @@ uint32_t ErrorDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->rootCauseErrorIdList.clear();
-            uint32_t _size9;
-            ::apache::thrift::protocol::TType _etype12;
-            xfer += iprot->readListBegin(_etype12, _size9);
-            this->rootCauseErrorIdList.resize(_size9);
-            uint32_t _i13;
-            for (_i13 = 0; _i13 < _size9; ++_i13)
+            uint32_t _size8;
+            ::apache::thrift::protocol::TType _etype11;
+            xfer += iprot->readListBegin(_etype11, _size8);
+            this->rootCauseErrorIdList.resize(_size8);
+            uint32_t _i12;
+            for (_i12 = 0; _i12 < _size8; ++_i12)
             {
-              xfer += iprot->readString(this->rootCauseErrorIdList[_i13]);
+              xfer += iprot->readString(this->rootCauseErrorIdList[_i12]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1646,10 +1521,10 @@ uint32_t ErrorDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("rootCauseErrorIdList", ::apache::thrift::protocol::T_LIST, 9);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->rootCauseErrorIdList.size()));
-      std::vector<std::string> ::const_iterator _iter14;
-      for (_iter14 = this->rootCauseErrorIdList.begin(); _iter14 != this->rootCauseErrorIdList.end(); ++_iter14)
+      std::vector<std::string> ::const_iterator _iter13;
+      for (_iter13 = this->rootCauseErrorIdList.begin(); _iter13 != this->rootCauseErrorIdList.end(); ++_iter13)
       {
-        xfer += oprot->writeString((*_iter14));
+        xfer += oprot->writeString((*_iter13));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1743,14 +1618,14 @@ uint32_t JobDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->errors.clear();
-            uint32_t _size15;
-            ::apache::thrift::protocol::TType _etype18;
-            xfer += iprot->readListBegin(_etype18, _size15);
-            this->errors.resize(_size15);
-            uint32_t _i19;
-            for (_i19 = 0; _i19 < _size15; ++_i19)
+            uint32_t _size14;
+            ::apache::thrift::protocol::TType _etype17;
+            xfer += iprot->readListBegin(_etype17, _size14);
+            this->errors.resize(_size14);
+            uint32_t _i18;
+            for (_i18 = 0; _i18 < _size14; ++_i18)
             {
-              xfer += this->errors[_i19].read(iprot);
+              xfer += this->errors[_i18].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -1822,10 +1697,10 @@ uint32_t JobDetails::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("errors", ::apache::thrift::protocol::T_LIST, 6);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->errors.size()));
-      std::vector<ErrorDetails> ::const_iterator _iter20;
-      for (_iter20 = this->errors.begin(); _iter20 != this->errors.end(); ++_iter20)
+      std::vector<ErrorDetails> ::const_iterator _iter19;
+      for (_iter19 = this->errors.begin(); _iter19 != this->errors.end(); ++_iter19)
       {
-        xfer += (*_iter20).write(oprot);
+        xfer += (*_iter19).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -1968,8 +1843,8 @@ void swap(DataTransferDetails &a, DataTransferDetails &b) {
   swap(a.__isset, b.__isset);
 }
 
-const char* TaskDetails::ascii_fingerprint = "5329C387E7633AF234038F8461F51097";
-const uint8_t TaskDetails::binary_fingerprint[16] = {0x53,0x29,0xC3,0x87,0xE7,0x63,0x3A,0xF2,0x34,0x03,0x8F,0x84,0x61,0xF5,0x10,0x97};
+const char* TaskDetails::ascii_fingerprint = "C0E50EB91BEBDC23A45D03BFD2BD630A";
+const uint8_t TaskDetails::binary_fingerprint[16] = {0xC0,0xE5,0x0E,0xB9,0x1B,0xEB,0xDC,0x23,0xA4,0x5D,0x03,0xBF,0xD2,0xBD,0x63,0x0A};
 
 uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -2036,14 +1911,14 @@ uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->applicationInputs.clear();
-            uint32_t _size21;
-            ::apache::thrift::protocol::TType _etype24;
-            xfer += iprot->readListBegin(_etype24, _size21);
-            this->applicationInputs.resize(_size21);
-            uint32_t _i25;
-            for (_i25 = 0; _i25 < _size21; ++_i25)
+            uint32_t _size20;
+            ::apache::thrift::protocol::TType _etype23;
+            xfer += iprot->readListBegin(_etype23, _size20);
+            this->applicationInputs.resize(_size20);
+            uint32_t _i24;
+            for (_i24 = 0; _i24 < _size20; ++_i24)
             {
-              xfer += this->applicationInputs[_i25].read(iprot);
+              xfer += this->applicationInputs[_i24].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2056,14 +1931,14 @@ uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->applicationOutputs.clear();
-            uint32_t _size26;
-            ::apache::thrift::protocol::TType _etype29;
-            xfer += iprot->readListBegin(_etype29, _size26);
-            this->applicationOutputs.resize(_size26);
-            uint32_t _i30;
-            for (_i30 = 0; _i30 < _size26; ++_i30)
+            uint32_t _size25;
+            ::apache::thrift::protocol::TType _etype28;
+            xfer += iprot->readListBegin(_etype28, _size25);
+            this->applicationOutputs.resize(_size25);
+            uint32_t _i29;
+            for (_i29 = 0; _i29 < _size25; ++_i29)
             {
-              xfer += this->applicationOutputs[_i30].read(iprot);
+              xfer += this->applicationOutputs[_i29].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2108,14 +1983,14 @@ uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->jobDetailsList.clear();
-            uint32_t _size31;
-            ::apache::thrift::protocol::TType _etype34;
-            xfer += iprot->readListBegin(_etype34, _size31);
-            this->jobDetailsList.resize(_size31);
-            uint32_t _i35;
-            for (_i35 = 0; _i35 < _size31; ++_i35)
+            uint32_t _size30;
+            ::apache::thrift::protocol::TType _etype33;
+            xfer += iprot->readListBegin(_etype33, _size30);
+            this->jobDetailsList.resize(_size30);
+            uint32_t _i34;
+            for (_i34 = 0; _i34 < _size30; ++_i34)
             {
-              xfer += this->jobDetailsList[_i35].read(iprot);
+              xfer += this->jobDetailsList[_i34].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2128,14 +2003,14 @@ uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->dataTransferDetailsList.clear();
-            uint32_t _size36;
-            ::apache::thrift::protocol::TType _etype39;
-            xfer += iprot->readListBegin(_etype39, _size36);
-            this->dataTransferDetailsList.resize(_size36);
-            uint32_t _i40;
-            for (_i40 = 0; _i40 < _size36; ++_i40)
+            uint32_t _size35;
+            ::apache::thrift::protocol::TType _etype38;
+            xfer += iprot->readListBegin(_etype38, _size35);
+            this->dataTransferDetailsList.resize(_size35);
+            uint32_t _i39;
+            for (_i39 = 0; _i39 < _size35; ++_i39)
             {
-              xfer += this->dataTransferDetailsList[_i40].read(iprot);
+              xfer += this->dataTransferDetailsList[_i39].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2148,14 +2023,14 @@ uint32_t TaskDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->errors.clear();
-            uint32_t _size41;
-            ::apache::thrift::protocol::TType _etype44;
-            xfer += iprot->readListBegin(_etype44, _size41);
-            this->errors.resize(_size41);
-            uint32_t _i45;
-            for (_i45 = 0; _i45 < _size41; ++_i45)
+            uint32_t _size40;
+            ::apache::thrift::protocol::TType _etype43;
+            xfer += iprot->readListBegin(_etype43, _size40);
+            this->errors.resize(_size40);
+            uint32_t _i44;
+            for (_i44 = 0; _i44 < _size40; ++_i44)
             {
-              xfer += this->errors[_i45].read(iprot);
+              xfer += this->errors[_i44].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2210,10 +2085,10 @@ uint32_t TaskDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("applicationInputs", ::apache::thrift::protocol::T_LIST, 6);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->applicationInputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter46;
-      for (_iter46 = this->applicationInputs.begin(); _iter46 != this->applicationInputs.end(); ++_iter46)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> ::const_iterator _iter45;
+      for (_iter45 = this->applicationInputs.begin(); _iter45 != this->applicationInputs.end(); ++_iter45)
       {
-        xfer += (*_iter46).write(oprot);
+        xfer += (*_iter45).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2223,10 +2098,10 @@ uint32_t TaskDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("applicationOutputs", ::apache::thrift::protocol::T_LIST, 7);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->applicationOutputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter47;
-      for (_iter47 = this->applicationOutputs.begin(); _iter47 != this->applicationOutputs.end(); ++_iter47)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> ::const_iterator _iter46;
+      for (_iter46 = this->applicationOutputs.begin(); _iter46 != this->applicationOutputs.end(); ++_iter46)
       {
-        xfer += (*_iter47).write(oprot);
+        xfer += (*_iter46).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2256,10 +2131,10 @@ uint32_t TaskDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("jobDetailsList", ::apache::thrift::protocol::T_LIST, 12);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->jobDetailsList.size()));
-      std::vector<JobDetails> ::const_iterator _iter48;
-      for (_iter48 = this->jobDetailsList.begin(); _iter48 != this->jobDetailsList.end(); ++_iter48)
+      std::vector<JobDetails> ::const_iterator _iter47;
+      for (_iter47 = this->jobDetailsList.begin(); _iter47 != this->jobDetailsList.end(); ++_iter47)
       {
-        xfer += (*_iter48).write(oprot);
+        xfer += (*_iter47).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2269,10 +2144,10 @@ uint32_t TaskDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("dataTransferDetailsList", ::apache::thrift::protocol::T_LIST, 13);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->dataTransferDetailsList.size()));
-      std::vector<DataTransferDetails> ::const_iterator _iter49;
-      for (_iter49 = this->dataTransferDetailsList.begin(); _iter49 != this->dataTransferDetailsList.end(); ++_iter49)
+      std::vector<DataTransferDetails> ::const_iterator _iter48;
+      for (_iter48 = this->dataTransferDetailsList.begin(); _iter48 != this->dataTransferDetailsList.end(); ++_iter48)
       {
-        xfer += (*_iter49).write(oprot);
+        xfer += (*_iter48).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2282,10 +2157,10 @@ uint32_t TaskDetails::write(::apache::thrift::protocol::TProtocol* oprot) const
     xfer += oprot->writeFieldBegin("errors", ::apache::thrift::protocol::T_LIST, 14);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->errors.size()));
-      std::vector<ErrorDetails> ::const_iterator _iter50;
-      for (_iter50 = this->errors.begin(); _iter50 != this->errors.end(); ++_iter50)
+      std::vector<ErrorDetails> ::const_iterator _iter49;
+      for (_iter49 = this->errors.begin(); _iter49 != this->errors.end(); ++_iter49)
       {
-        xfer += (*_iter50).write(oprot);
+        xfer += (*_iter49).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2315,8 +2190,8 @@ void swap(TaskDetails &a, TaskDetails &b) {
   swap(a.__isset, b.__isset);
 }
 
-const char* WorkflowNodeDetails::ascii_fingerprint = "6ABC52FB94DCEC8D6AA3F1F3188E2691";
-const uint8_t WorkflowNodeDetails::binary_fingerprint[16] = {0x6A,0xBC,0x52,0xFB,0x94,0xDC,0xEC,0x8D,0x6A,0xA3,0xF1,0xF3,0x18,0x8E,0x26,0x91};
+const char* WorkflowNodeDetails::ascii_fingerprint = "F9600D5A8E84EAF65A64E38C18DFACAE";
+const uint8_t WorkflowNodeDetails::binary_fingerprint[16] = {0xF9,0x60,0x0D,0x5A,0x8E,0x84,0xEA,0xF6,0x5A,0x64,0xE3,0x8C,0x18,0xDF,0xAC,0xAE};
 
 uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -2367,9 +2242,9 @@ uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 4:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast51;
-          xfer += iprot->readI32(ecast51);
-          this->executionUnit = (ExecutionUnit::type)ecast51;
+          int32_t ecast50;
+          xfer += iprot->readI32(ecast50);
+          this->executionUnit = (ExecutionUnit::type)ecast50;
           isset_executionUnit = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -2387,14 +2262,14 @@ uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->nodeInputs.clear();
-            uint32_t _size52;
-            ::apache::thrift::protocol::TType _etype55;
-            xfer += iprot->readListBegin(_etype55, _size52);
-            this->nodeInputs.resize(_size52);
-            uint32_t _i56;
-            for (_i56 = 0; _i56 < _size52; ++_i56)
+            uint32_t _size51;
+            ::apache::thrift::protocol::TType _etype54;
+            xfer += iprot->readListBegin(_etype54, _size51);
+            this->nodeInputs.resize(_size51);
+            uint32_t _i55;
+            for (_i55 = 0; _i55 < _size51; ++_i55)
             {
-              xfer += this->nodeInputs[_i56].read(iprot);
+              xfer += this->nodeInputs[_i55].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2407,14 +2282,14 @@ uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->nodeOutputs.clear();
-            uint32_t _size57;
-            ::apache::thrift::protocol::TType _etype60;
-            xfer += iprot->readListBegin(_etype60, _size57);
-            this->nodeOutputs.resize(_size57);
-            uint32_t _i61;
-            for (_i61 = 0; _i61 < _size57; ++_i61)
+            uint32_t _size56;
+            ::apache::thrift::protocol::TType _etype59;
+            xfer += iprot->readListBegin(_etype59, _size56);
+            this->nodeOutputs.resize(_size56);
+            uint32_t _i60;
+            for (_i60 = 0; _i60 < _size56; ++_i60)
             {
-              xfer += this->nodeOutputs[_i61].read(iprot);
+              xfer += this->nodeOutputs[_i60].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2435,14 +2310,14 @@ uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->taskDetailsList.clear();
-            uint32_t _size62;
-            ::apache::thrift::protocol::TType _etype65;
-            xfer += iprot->readListBegin(_etype65, _size62);
-            this->taskDetailsList.resize(_size62);
-            uint32_t _i66;
-            for (_i66 = 0; _i66 < _size62; ++_i66)
+            uint32_t _size61;
+            ::apache::thrift::protocol::TType _etype64;
+            xfer += iprot->readListBegin(_etype64, _size61);
+            this->taskDetailsList.resize(_size61);
+            uint32_t _i65;
+            for (_i65 = 0; _i65 < _size61; ++_i65)
             {
-              xfer += this->taskDetailsList[_i66].read(iprot);
+              xfer += this->taskDetailsList[_i65].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2455,14 +2330,14 @@ uint32_t WorkflowNodeDetails::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->errors.clear();
-            uint32_t _size67;
-            ::apache::thrift::protocol::TType _etype70;
-            xfer += iprot->readListBegin(_etype70, _size67);
-            this->errors.resize(_size67);
-            uint32_t _i71;
-            for (_i71 = 0; _i71 < _size67; ++_i71)
+            uint32_t _size66;
+            ::apache::thrift::protocol::TType _etype69;
+            xfer += iprot->readListBegin(_etype69, _size66);
+            this->errors.resize(_size66);
+            uint32_t _i70;
+            for (_i70 = 0; _i70 < _size66; ++_i70)
             {
-              xfer += this->errors[_i71].read(iprot);
+              xfer += this->errors[_i70].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2519,10 +2394,10 @@ uint32_t WorkflowNodeDetails::write(::apache::thrift::protocol::TProtocol* oprot
     xfer += oprot->writeFieldBegin("nodeInputs", ::apache::thrift::protocol::T_LIST, 6);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->nodeInputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter72;
-      for (_iter72 = this->nodeInputs.begin(); _iter72 != this->nodeInputs.end(); ++_iter72)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> ::const_iterator _iter71;
+      for (_iter71 = this->nodeInputs.begin(); _iter71 != this->nodeInputs.end(); ++_iter71)
       {
-        xfer += (*_iter72).write(oprot);
+        xfer += (*_iter71).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2532,10 +2407,10 @@ uint32_t WorkflowNodeDetails::write(::apache::thrift::protocol::TProtocol* oprot
     xfer += oprot->writeFieldBegin("nodeOutputs", ::apache::thrift::protocol::T_LIST, 7);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->nodeOutputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter73;
-      for (_iter73 = this->nodeOutputs.begin(); _iter73 != this->nodeOutputs.end(); ++_iter73)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> ::const_iterator _iter72;
+      for (_iter72 = this->nodeOutputs.begin(); _iter72 != this->nodeOutputs.end(); ++_iter72)
       {
-        xfer += (*_iter73).write(oprot);
+        xfer += (*_iter72).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2550,10 +2425,10 @@ uint32_t WorkflowNodeDetails::write(::apache::thrift::protocol::TProtocol* oprot
     xfer += oprot->writeFieldBegin("taskDetailsList", ::apache::thrift::protocol::T_LIST, 9);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->taskDetailsList.size()));
-      std::vector<TaskDetails> ::const_iterator _iter74;
-      for (_iter74 = this->taskDetailsList.begin(); _iter74 != this->taskDetailsList.end(); ++_iter74)
+      std::vector<TaskDetails> ::const_iterator _iter73;
+      for (_iter73 = this->taskDetailsList.begin(); _iter73 != this->taskDetailsList.end(); ++_iter73)
       {
-        xfer += (*_iter74).write(oprot);
+        xfer += (*_iter73).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2563,10 +2438,10 @@ uint32_t WorkflowNodeDetails::write(::apache::thrift::protocol::TProtocol* oprot
     xfer += oprot->writeFieldBegin("errors", ::apache::thrift::protocol::T_LIST, 10);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->errors.size()));
-      std::vector<ErrorDetails> ::const_iterator _iter75;
-      for (_iter75 = this->errors.begin(); _iter75 != this->errors.end(); ++_iter75)
+      std::vector<ErrorDetails> ::const_iterator _iter74;
+      for (_iter74 = this->errors.begin(); _iter74 != this->errors.end(); ++_iter74)
       {
-        xfer += (*_iter75).write(oprot);
+        xfer += (*_iter74).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -2708,14 +2583,14 @@ uint32_t ValidationResults::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->validationResultList.clear();
-            uint32_t _size76;
-            ::apache::thrift::protocol::TType _etype79;
-            xfer += iprot->readListBegin(_etype79, _size76);
-            this->validationResultList.resize(_size76);
-            uint32_t _i80;
-            for (_i80 = 0; _i80 < _size76; ++_i80)
+            uint32_t _size75;
+            ::apache::thrift::protocol::TType _etype78;
+            xfer += iprot->readListBegin(_etype78, _size75);
+            this->validationResultList.resize(_size75);
+            uint32_t _i79;
+            for (_i79 = 0; _i79 < _size75; ++_i79)
             {
-              xfer += this->validationResultList[_i80].read(iprot);
+              xfer += this->validationResultList[_i79].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2751,10 +2626,10 @@ uint32_t ValidationResults::write(::apache::thrift::protocol::TProtocol* oprot)
   xfer += oprot->writeFieldBegin("validationResultList", ::apache::thrift::protocol::T_LIST, 2);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->validationResultList.size()));
-    std::vector<ValidatorResult> ::const_iterator _iter81;
-    for (_iter81 = this->validationResultList.begin(); _iter81 != this->validationResultList.end(); ++_iter81)
+    std::vector<ValidatorResult> ::const_iterator _iter80;
+    for (_iter80 = this->validationResultList.begin(); _iter80 != this->validationResultList.end(); ++_iter80)
     {
-      xfer += (*_iter81).write(oprot);
+      xfer += (*_iter80).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -2771,8 +2646,8 @@ void swap(ValidationResults &a, ValidationResults &b) {
   swap(a.validationResultList, b.validationResultList);
 }
 
-const char* Experiment::ascii_fingerprint = "EAE6C4E7D5F1EDAC82E4630FDDD892A9";
-const uint8_t Experiment::binary_fingerprint[16] = {0xEA,0xE6,0xC4,0xE7,0xD5,0xF1,0xED,0xAC,0x82,0xE4,0x63,0x0F,0xDD,0xD8,0x92,0xA9};
+const char* Experiment::ascii_fingerprint = "CDFB79AEABF988D5D38D8EEAEEBECC6F";
+const uint8_t Experiment::binary_fingerprint[16] = {0xCD,0xFB,0x79,0xAE,0xAB,0xF9,0x88,0xD5,0xD3,0x8D,0x8E,0xEA,0xEE,0xBE,0xCC,0x6F};
 
 uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -2898,14 +2773,14 @@ uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->experimentInputs.clear();
-            uint32_t _size82;
-            ::apache::thrift::protocol::TType _etype85;
-            xfer += iprot->readListBegin(_etype85, _size82);
-            this->experimentInputs.resize(_size82);
-            uint32_t _i86;
-            for (_i86 = 0; _i86 < _size82; ++_i86)
+            uint32_t _size81;
+            ::apache::thrift::protocol::TType _etype84;
+            xfer += iprot->readListBegin(_etype84, _size81);
+            this->experimentInputs.resize(_size81);
+            uint32_t _i85;
+            for (_i85 = 0; _i85 < _size81; ++_i85)
             {
-              xfer += this->experimentInputs[_i86].read(iprot);
+              xfer += this->experimentInputs[_i85].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2918,14 +2793,14 @@ uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->experimentOutputs.clear();
-            uint32_t _size87;
-            ::apache::thrift::protocol::TType _etype90;
-            xfer += iprot->readListBegin(_etype90, _size87);
-            this->experimentOutputs.resize(_size87);
-            uint32_t _i91;
-            for (_i91 = 0; _i91 < _size87; ++_i91)
+            uint32_t _size86;
+            ::apache::thrift::protocol::TType _etype89;
+            xfer += iprot->readListBegin(_etype89, _size86);
+            this->experimentOutputs.resize(_size86);
+            uint32_t _i90;
+            for (_i90 = 0; _i90 < _size86; ++_i90)
             {
-              xfer += this->experimentOutputs[_i91].read(iprot);
+              xfer += this->experimentOutputs[_i90].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2946,14 +2821,14 @@ uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->stateChangeList.clear();
-            uint32_t _size92;
-            ::apache::thrift::protocol::TType _etype95;
-            xfer += iprot->readListBegin(_etype95, _size92);
-            this->stateChangeList.resize(_size92);
-            uint32_t _i96;
-            for (_i96 = 0; _i96 < _size92; ++_i96)
+            uint32_t _size91;
+            ::apache::thrift::protocol::TType _etype94;
+            xfer += iprot->readListBegin(_etype94, _size91);
+            this->stateChangeList.resize(_size91);
+            uint32_t _i95;
+            for (_i95 = 0; _i95 < _size91; ++_i95)
             {
-              xfer += this->stateChangeList[_i96].read(iprot);
+              xfer += this->stateChangeList[_i95].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2966,14 +2841,14 @@ uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->workflowNodeDetailsList.clear();
-            uint32_t _size97;
-            ::apache::thrift::protocol::TType _etype100;
-            xfer += iprot->readListBegin(_etype100, _size97);
-            this->workflowNodeDetailsList.resize(_size97);
-            uint32_t _i101;
-            for (_i101 = 0; _i101 < _size97; ++_i101)
+            uint32_t _size96;
+            ::apache::thrift::protocol::TType _etype99;
+            xfer += iprot->readListBegin(_etype99, _size96);
+            this->workflowNodeDetailsList.resize(_size96);
+            uint32_t _i100;
+            for (_i100 = 0; _i100 < _size96; ++_i100)
             {
-              xfer += this->workflowNodeDetailsList[_i101].read(iprot);
+              xfer += this->workflowNodeDetailsList[_i100].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -2986,14 +2861,14 @@ uint32_t Experiment::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->errors.clear();
-            uint32_t _size102;
-            ::apache::thrift::protocol::TType _etype105;
-            xfer += iprot->readListBegin(_etype105, _size102);
-            this->errors.resize(_size102);
-            uint32_t _i106;
-            for (_i106 = 0; _i106 < _size102; ++_i106)
+            uint32_t _size101;
+            ::apache::thrift::protocol::TType _etype104;
+            xfer += iprot->readListBegin(_etype104, _size101);
+            this->errors.resize(_size101);
+            uint32_t _i105;
+            for (_i105 = 0; _i105 < _size101; ++_i105)
             {
-              xfer += this->errors[_i106].read(iprot);
+              xfer += this->errors[_i105].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3086,10 +2961,10 @@ uint32_t Experiment::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("experimentInputs", ::apache::thrift::protocol::T_LIST, 13);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->experimentInputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter107;
-      for (_iter107 = this->experimentInputs.begin(); _iter107 != this->experimentInputs.end(); ++_iter107)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> ::const_iterator _iter106;
+      for (_iter106 = this->experimentInputs.begin(); _iter106 != this->experimentInputs.end(); ++_iter106)
       {
-        xfer += (*_iter107).write(oprot);
+        xfer += (*_iter106).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3099,10 +2974,10 @@ uint32_t Experiment::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("experimentOutputs", ::apache::thrift::protocol::T_LIST, 14);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->experimentOutputs.size()));
-      std::vector<DataObjectType> ::const_iterator _iter108;
-      for (_iter108 = this->experimentOutputs.begin(); _iter108 != this->experimentOutputs.end(); ++_iter108)
+      std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> ::const_iterator _iter107;
+      for (_iter107 = this->experimentOutputs.begin(); _iter107 != this->experimentOutputs.end(); ++_iter107)
       {
-        xfer += (*_iter108).write(oprot);
+        xfer += (*_iter107).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3117,10 +2992,10 @@ uint32_t Experiment::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("stateChangeList", ::apache::thrift::protocol::T_LIST, 16);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->stateChangeList.size()));
-      std::vector<WorkflowNodeStatus> ::const_iterator _iter109;
-      for (_iter109 = this->stateChangeList.begin(); _iter109 != this->stateChangeList.end(); ++_iter109)
+      std::vector<WorkflowNodeStatus> ::const_iterator _iter108;
+      for (_iter108 = this->stateChangeList.begin(); _iter108 != this->stateChangeList.end(); ++_iter108)
       {
-        xfer += (*_iter109).write(oprot);
+        xfer += (*_iter108).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3130,10 +3005,10 @@ uint32_t Experiment::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("workflowNodeDetailsList", ::apache::thrift::protocol::T_LIST, 17);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->workflowNodeDetailsList.size()));
-      std::vector<WorkflowNodeDetails> ::const_iterator _iter110;
-      for (_iter110 = this->workflowNodeDetailsList.begin(); _iter110 != this->workflowNodeDetailsList.end(); ++_iter110)
+      std::vector<WorkflowNodeDetails> ::const_iterator _iter109;
+      for (_iter109 = this->workflowNodeDetailsList.begin(); _iter109 != this->workflowNodeDetailsList.end(); ++_iter109)
       {
-        xfer += (*_iter110).write(oprot);
+        xfer += (*_iter109).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3143,10 +3018,10 @@ uint32_t Experiment::write(::apache::thrift::protocol::TProtocol* oprot) const {
     xfer += oprot->writeFieldBegin("errors", ::apache::thrift::protocol::T_LIST, 18);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->errors.size()));
-      std::vector<ErrorDetails> ::const_iterator _iter111;
-      for (_iter111 = this->errors.begin(); _iter111 != this->errors.end(); ++_iter111)
+      std::vector<ErrorDetails> ::const_iterator _iter110;
+      for (_iter110 = this->errors.begin(); _iter110 != this->errors.end(); ++_iter110)
       {
-        xfer += (*_iter111).write(oprot);
+        xfer += (*_iter110).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -3275,14 +3150,14 @@ uint32_t ExperimentSummary::read(::apache::thrift::protocol::TProtocol* iprot) {
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->errors.clear();
-            uint32_t _size112;
-            ::apache::thrift::protocol::TType _etype115;
-            xfer += iprot->readListBegin(_etype115, _size112);
-            this->errors.resize(_size112);
-            uint32_t _i116;
-            for (_i116 = 0; _i116 < _size112; ++_i116)
+            uint32_t _size111;
+            ::apache::thrift::protocol::TType _etype114;
+            xfer += iprot->readListBegin(_etype114, _size111);
+            this->errors.resize(_size111);
+            uint32_t _i115;
+            for (_i115 = 0; _i115 < _size111; ++_i115)
             {
-              xfer += this->errors[_i116].read(iprot);
+              xfer += this->errors[_i115].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -3355,10 +3230,10 @@ uint32_t ExperimentSummary::write(::apache::thrift::protocol::TProtocol* oprot)
     xfer += oprot->writeFieldBegin("errors", ::apache::thrift::protocol::T_LIST, 9);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->errors.size()));
-      std::vector<ErrorDetails> ::const_iterator _iter117;
-      for (_iter117 = this->errors.begin(); _iter117 != this->errors.end(); ++_iter117)
+      std::vector<ErrorDetails> ::const_iterator _iter116;
+      for (_iter116 = this->errors.begin(); _iter116 != this->errors.end(); ++_iter116)
       {
-        xfer += (*_iter117).write(oprot);
+        xfer += (*_iter116).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.h
index fa1803e..c4edbb7 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/experimentModel_types.h
@@ -31,6 +31,7 @@
 
 #include <thrift/cxxfunctional.h>
 #include "computeResourceModel_types.h"
+#include "applicationInterfaceModel_types.h"
 
 
 namespace apache { namespace airavata { namespace model { namespace workspace { namespace experiment {
@@ -164,18 +165,6 @@ struct CorrectiveAction {
 
 extern const std::map<int, const char*> _CorrectiveAction_VALUES_TO_NAMES;
 
-struct DataType {
-  enum type {
-    STRING = 0,
-    INTEGER = 1,
-    URI = 2,
-    STDOUT = 3,
-    STDERR = 4
-  };
-};
-
-extern const std::map<int, const char*> _DataType_VALUES_TO_NAMES;
-
 struct ExecutionUnit {
   enum type {
     INPUT = 0,
@@ -505,81 +494,6 @@ class ApplicationStatus {
 
 void swap(ApplicationStatus &a, ApplicationStatus &b);
 
-typedef struct _DataObjectType__isset {
-  _DataObjectType__isset() : value(false), type(false), metaData(false) {}
-  bool value;
-  bool type;
-  bool metaData;
-} _DataObjectType__isset;
-
-class DataObjectType {
- public:
-
-  static const char* ascii_fingerprint; // = "544FBB8031AE070AEEB7AC0E4A90E43C";
-  static const uint8_t binary_fingerprint[16]; // = {0x54,0x4F,0xBB,0x80,0x31,0xAE,0x07,0x0A,0xEE,0xB7,0xAC,0x0E,0x4A,0x90,0xE4,0x3C};
-
-  DataObjectType() : key(), value(), type((DataType::type)0), metaData() {
-  }
-
-  virtual ~DataObjectType() throw() {}
-
-  std::string key;
-  std::string value;
-  DataType::type type;
-  std::string metaData;
-
-  _DataObjectType__isset __isset;
-
-  void __set_key(const std::string& val) {
-    key = val;
-  }
-
-  void __set_value(const std::string& val) {
-    value = val;
-    __isset.value = true;
-  }
-
-  void __set_type(const DataType::type val) {
-    type = val;
-    __isset.type = true;
-  }
-
-  void __set_metaData(const std::string& val) {
-    metaData = val;
-    __isset.metaData = true;
-  }
-
-  bool operator == (const DataObjectType & rhs) const
-  {
-    if (!(key == rhs.key))
-      return false;
-    if (__isset.value != rhs.__isset.value)
-      return false;
-    else if (__isset.value && !(value == rhs.value))
-      return false;
-    if (__isset.type != rhs.__isset.type)
-      return false;
-    else if (__isset.type && !(type == rhs.type))
-      return false;
-    if (__isset.metaData != rhs.__isset.metaData)
-      return false;
-    else if (__isset.metaData && !(metaData == rhs.metaData))
-      return false;
-    return true;
-  }
-  bool operator != (const DataObjectType &rhs) const {
-    return !(*this == rhs);
-  }
-
-  bool operator < (const DataObjectType & ) const;
-
-  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
-  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
-
-};
-
-void swap(DataObjectType &a, DataObjectType &b);
-
 typedef struct _ComputationalResourceScheduling__isset {
   _ComputationalResourceScheduling__isset() : resourceHostId(false), totalCPUCount(false), nodeCount(false), numberOfThreads(false), queueName(false), wallTimeLimit(false), jobStartTime(false), totalPhysicalMemory(false), computationalProjectAccount(false) {}
   bool resourceHostId;
@@ -1380,8 +1294,8 @@ typedef struct _TaskDetails__isset {
 class TaskDetails {
  public:
 
-  static const char* ascii_fingerprint; // = "5329C387E7633AF234038F8461F51097";
-  static const uint8_t binary_fingerprint[16]; // = {0x53,0x29,0xC3,0x87,0xE7,0x63,0x3A,0xF2,0x34,0x03,0x8F,0x84,0x61,0xF5,0x10,0x97};
+  static const char* ascii_fingerprint; // = "C0E50EB91BEBDC23A45D03BFD2BD630A";
+  static const uint8_t binary_fingerprint[16]; // = {0xC0,0xE5,0x0E,0xB9,0x1B,0xEB,0xDC,0x23,0xA4,0x5D,0x03,0xBF,0xD2,0xBD,0x63,0x0A};
 
   TaskDetails() : taskID("DO_NOT_SET_AT_CLIENTS"), creationTime(0), applicationId(), applicationVersion(), applicationDeploymentId() {
   }
@@ -1393,8 +1307,8 @@ class TaskDetails {
   std::string applicationId;
   std::string applicationVersion;
   std::string applicationDeploymentId;
-  std::vector<DataObjectType>  applicationInputs;
-  std::vector<DataObjectType>  applicationOutputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType>  applicationInputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType>  applicationOutputs;
   ComputationalResourceScheduling taskScheduling;
   AdvancedInputDataHandling advancedInputDataHandling;
   AdvancedOutputDataHandling advancedOutputDataHandling;
@@ -1429,12 +1343,12 @@ class TaskDetails {
     __isset.applicationDeploymentId = true;
   }
 
-  void __set_applicationInputs(const std::vector<DataObjectType> & val) {
+  void __set_applicationInputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> & val) {
     applicationInputs = val;
     __isset.applicationInputs = true;
   }
 
-  void __set_applicationOutputs(const std::vector<DataObjectType> & val) {
+  void __set_applicationOutputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & val) {
     applicationOutputs = val;
     __isset.applicationOutputs = true;
   }
@@ -1559,8 +1473,8 @@ typedef struct _WorkflowNodeDetails__isset {
 class WorkflowNodeDetails {
  public:
 
-  static const char* ascii_fingerprint; // = "6ABC52FB94DCEC8D6AA3F1F3188E2691";
-  static const uint8_t binary_fingerprint[16]; // = {0x6A,0xBC,0x52,0xFB,0x94,0xDC,0xEC,0x8D,0x6A,0xA3,0xF1,0xF3,0x18,0x8E,0x26,0x91};
+  static const char* ascii_fingerprint; // = "F9600D5A8E84EAF65A64E38C18DFACAE";
+  static const uint8_t binary_fingerprint[16]; // = {0xF9,0x60,0x0D,0x5A,0x8E,0x84,0xEA,0xF6,0x5A,0x64,0xE3,0x8C,0x18,0xDF,0xAC,0xAE};
 
   WorkflowNodeDetails() : nodeInstanceId("DO_NOT_SET_AT_CLIENTS"), creationTime(0), nodeName("SINGLE_APP_NODE"), executionUnit((ExecutionUnit::type)1), executionUnitData() {
     executionUnit = (ExecutionUnit::type)1;
@@ -1574,8 +1488,8 @@ class WorkflowNodeDetails {
   std::string nodeName;
   ExecutionUnit::type executionUnit;
   std::string executionUnitData;
-  std::vector<DataObjectType>  nodeInputs;
-  std::vector<DataObjectType>  nodeOutputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType>  nodeInputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType>  nodeOutputs;
   WorkflowNodeStatus workflowNodeStatus;
   std::vector<TaskDetails>  taskDetailsList;
   std::vector<ErrorDetails>  errors;
@@ -1604,12 +1518,12 @@ class WorkflowNodeDetails {
     __isset.executionUnitData = true;
   }
 
-  void __set_nodeInputs(const std::vector<DataObjectType> & val) {
+  void __set_nodeInputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> & val) {
     nodeInputs = val;
     __isset.nodeInputs = true;
   }
 
-  void __set_nodeOutputs(const std::vector<DataObjectType> & val) {
+  void __set_nodeOutputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & val) {
     nodeOutputs = val;
     __isset.nodeOutputs = true;
   }
@@ -1798,8 +1712,8 @@ typedef struct _Experiment__isset {
 class Experiment {
  public:
 
-  static const char* ascii_fingerprint; // = "EAE6C4E7D5F1EDAC82E4630FDDD892A9";
-  static const uint8_t binary_fingerprint[16]; // = {0xEA,0xE6,0xC4,0xE7,0xD5,0xF1,0xED,0xAC,0x82,0xE4,0x63,0x0F,0xDD,0xD8,0x92,0xA9};
+  static const char* ascii_fingerprint; // = "CDFB79AEABF988D5D38D8EEAEEBECC6F";
+  static const uint8_t binary_fingerprint[16]; // = {0xCD,0xFB,0x79,0xAE,0xAB,0xF9,0x88,0xD5,0xD3,0x8D,0x8E,0xEA,0xEE,0xBE,0xCC,0x6F};
 
   Experiment() : experimentID("DO_NOT_SET_AT_CLIENTS"), projectID("DEFAULT"), creationTime(0), userName(), name(), description(), applicationId(), applicationVersion(), workflowTemplateId(), workflowTemplateVersion(), workflowExecutionInstanceId() {
   }
@@ -1818,8 +1732,8 @@ class Experiment {
   std::string workflowTemplateVersion;
   UserConfigurationData userConfigurationData;
   std::string workflowExecutionInstanceId;
-  std::vector<DataObjectType>  experimentInputs;
-  std::vector<DataObjectType>  experimentOutputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType>  experimentInputs;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType>  experimentOutputs;
   ExperimentStatus experimentStatus;
   std::vector<WorkflowNodeStatus>  stateChangeList;
   std::vector<WorkflowNodeDetails>  workflowNodeDetailsList;
@@ -1883,12 +1797,12 @@ class Experiment {
     __isset.workflowExecutionInstanceId = true;
   }
 
-  void __set_experimentInputs(const std::vector<DataObjectType> & val) {
+  void __set_experimentInputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::InputDataObjectType> & val) {
     experimentInputs = val;
     __isset.experimentInputs = true;
   }
 
-  void __set_experimentOutputs(const std::vector<DataObjectType> & val) {
+  void __set_experimentOutputs(const std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & val) {
     experimentOutputs = val;
     __isset.experimentOutputs = true;
   }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.cpp
index c661759..4bfd1bc 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.cpp
@@ -607,8 +607,8 @@ void swap(TaskStatusChangeRequestEvent &a, TaskStatusChangeRequestEvent &b) {
   swap(a.taskIdentity, b.taskIdentity);
 }
 
-const char* TaskOutputChangeEvent::ascii_fingerprint = "A7F4390C8E4A64CE48431C1FB6A45F50";
-const uint8_t TaskOutputChangeEvent::binary_fingerprint[16] = {0xA7,0xF4,0x39,0x0C,0x8E,0x4A,0x64,0xCE,0x48,0x43,0x1C,0x1F,0xB6,0xA4,0x5F,0x50};
+const char* TaskOutputChangeEvent::ascii_fingerprint = "850570A8E8883AE24DE92AFC9AA86C7E";
+const uint8_t TaskOutputChangeEvent::binary_fingerprint[16] = {0x85,0x05,0x70,0xA8,0xE8,0x88,0x3A,0xE2,0x4D,0xE9,0x2A,0xFC,0x9A,0xA8,0x6C,0x7E};
 
 uint32_t TaskOutputChangeEvent::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -683,7 +683,7 @@ uint32_t TaskOutputChangeEvent::write(::apache::thrift::protocol::TProtocol* opr
   xfer += oprot->writeFieldBegin("output", ::apache::thrift::protocol::T_LIST, 1);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->output.size()));
-    std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> ::const_iterator _iter9;
+    std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> ::const_iterator _iter9;
     for (_iter9 = this->output.begin(); _iter9 != this->output.end(); ++_iter9)
     {
       xfer += (*_iter9).write(oprot);

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.h
index 4eefe50..61dc55f 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/messagingEvents_types.h
@@ -31,6 +31,7 @@
 
 #include <thrift/cxxfunctional.h>
 #include "experimentModel_types.h"
+#include "applicationInterfaceModel_types.h"
 
 
 namespace apache { namespace airavata { namespace model { namespace messaging { namespace event {
@@ -353,18 +354,18 @@ void swap(TaskStatusChangeRequestEvent &a, TaskStatusChangeRequestEvent &b);
 class TaskOutputChangeEvent {
  public:
 
-  static const char* ascii_fingerprint; // = "A7F4390C8E4A64CE48431C1FB6A45F50";
-  static const uint8_t binary_fingerprint[16]; // = {0xA7,0xF4,0x39,0x0C,0x8E,0x4A,0x64,0xCE,0x48,0x43,0x1C,0x1F,0xB6,0xA4,0x5F,0x50};
+  static const char* ascii_fingerprint; // = "850570A8E8883AE24DE92AFC9AA86C7E";
+  static const uint8_t binary_fingerprint[16]; // = {0x85,0x05,0x70,0xA8,0xE8,0x88,0x3A,0xE2,0x4D,0xE9,0x2A,0xFC,0x9A,0xA8,0x6C,0x7E};
 
   TaskOutputChangeEvent() {
   }
 
   virtual ~TaskOutputChangeEvent() throw() {}
 
-  std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType>  output;
+  std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType>  output;
   TaskIdentifier taskIdentity;
 
-  void __set_output(const std::vector< ::apache::airavata::model::workspace::experiment::DataObjectType> & val) {
+  void __set_output(const std::vector< ::apache::airavata::model::appcatalog::appinterface::OutputDataObjectType> & val) {
     output = val;
   }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/API/Airavata.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/API/Airavata.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/API/Airavata.php
index 20134a8..ba421d8 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/API/Airavata.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/API/Airavata.php
@@ -11357,7 +11357,7 @@ class Airavata_getExperimentOutputs_result {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType',
             ),
           ),
         1 => array(
@@ -11429,7 +11429,7 @@ class Airavata_getExperimentOutputs_result {
             for ($_i74 = 0; $_i74 < $_size70; ++$_i74)
             {
               $elem75 = null;
-              $elem75 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem75 = new \Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType();
               $xfer += $elem75->read($input);
               $this->success []= $elem75;
             }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Messaging/Event/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Messaging/Event/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Messaging/Event/Types.php
index 0be911b..d20392a 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Messaging/Event/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Messaging/Event/Types.php
@@ -705,7 +705,7 @@ class TaskOutputChangeEvent {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType',
             ),
           ),
         2 => array(
@@ -753,7 +753,7 @@ class TaskOutputChangeEvent {
             for ($_i4 = 0; $_i4 < $_size0; ++$_i4)
             {
               $elem5 = null;
-              $elem5 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem5 = new \Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType();
               $xfer += $elem5->read($input);
               $this->output []= $elem5;
             }


[38/50] [abbrv] airavata git commit: GFac - Removed ActualParameter objects and replaced them with Input and Output DataObjectTypes

Posted by ch...@apache.org.
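
The hunks below retire the ActualParameter conversions in GFacUtils and import the Thrift-generated appinterface beans (InputDataObjectType, OutputDataObjectType) instead. As an illustration only, and not code from this commit, here is a minimal sketch of a consumer of the new model; it assumes the generated InputDataObjectType bean exposes getName() and getValue() accessors mirroring the name/value fields that replace the old DataObjectType key/value pair, so treat those accessor names as hypothetical.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;

// Hypothetical helper, for illustration only: collapses a list of experiment or
// task inputs into a name -> value map, roughly the lookup that the commented-out
// getInputActualParameter(...) conversions used to provide to the GFac providers.
public class InputValueSketch {

    public static Map<String, String> toValueMap(List<InputDataObjectType> inputs) {
        Map<String, String> values = new HashMap<String, String>();
        if (inputs == null) {
            return values;
        }
        for (InputDataObjectType input : inputs) {
            // Assumed Thrift-generated accessors for the name and value fields.
            values.put(input.getName(), input.getValue());
        }
        return values;
    }
}
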
http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index c5a96f9..6e1b8b7 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -28,7 +28,6 @@ import org.apache.airavata.common.utils.AiravataZKUtils;
 import org.apache.airavata.common.utils.DBUtil;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.credential.store.store.impl.CredentialReaderImpl;
 import org.apache.airavata.gfac.Constants;
@@ -39,16 +38,48 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
-import org.apache.airavata.model.appcatalog.computeresource.*;
-import org.apache.airavata.model.workspace.experiment.*;
-import org.apache.airavata.model.workspace.experiment.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.GlobusJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
+import org.apache.airavata.model.workspace.experiment.ActionableGroup;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.ErrorDetails;
+import org.apache.airavata.model.workspace.experiment.JobDetails;
+import org.apache.airavata.model.workspace.experiment.JobState;
+import org.apache.airavata.model.workspace.experiment.JobStatus;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.ChildDataType;
 import org.apache.airavata.registry.cpi.CompositeIdentifier;
 import org.apache.airavata.registry.cpi.Registry;
-import org.apache.airavata.schemas.gfac.*;
+import org.apache.airavata.schemas.gfac.BooleanArrayType;
+import org.apache.airavata.schemas.gfac.BooleanParameterType;
+import org.apache.airavata.schemas.gfac.DataType;
+import org.apache.airavata.schemas.gfac.DoubleArrayType;
+import org.apache.airavata.schemas.gfac.DoubleParameterType;
+import org.apache.airavata.schemas.gfac.FileArrayType;
+import org.apache.airavata.schemas.gfac.FileParameterType;
+import org.apache.airavata.schemas.gfac.FloatArrayType;
+import org.apache.airavata.schemas.gfac.FloatParameterType;
+import org.apache.airavata.schemas.gfac.IntegerArrayType;
+import org.apache.airavata.schemas.gfac.IntegerParameterType;
+import org.apache.airavata.schemas.gfac.Parameter;
+import org.apache.airavata.schemas.gfac.StdErrParameterType;
+import org.apache.airavata.schemas.gfac.StdOutParameterType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.StringParameterType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
+import org.apache.airavata.schemas.gfac.URIParameterType;
 import org.apache.axiom.om.OMElement;
-import org.apache.zookeeper.*;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.ZKUtil;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -62,11 +93,25 @@ import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpression;
 import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.URISyntaxException;
 import java.net.UnknownHostException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
 
 public class GFacUtils {
 	private final static Logger log = LoggerFactory.getLogger(GFacUtils.class);
@@ -187,410 +232,410 @@ public class GFacUtils {
 		return buf.toString();
 	}
 
-	public static ActualParameter getInputActualParameter(Parameter parameter,
-			DataObjectType element) {
-		ActualParameter actualParameter = new ActualParameter();
-		if ("String".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StringParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((StringParameterType) actualParameter.getType())
-						.setValue(element.getValue());
-			} else {
-				((StringParameterType) actualParameter.getType()).setValue("");
-			}
-		} else if ("Double".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(DoubleParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((DoubleParameterType) actualParameter.getType())
-						.setValue(new Double(element.getValue()));
-			}
-		} else if ("Integer".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(IntegerParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((IntegerParameterType) actualParameter.getType())
-						.setValue(new Integer(element.getValue()));
-			}
-		} else if ("Float".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FloatParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((FloatParameterType) actualParameter.getType())
-						.setValue(new Float(element.getValue()));
-			}
-		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(BooleanParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((BooleanParameterType) actualParameter.getType())
-						.setValue(new Boolean(element.getValue()));
-			}
-		} else if ("File".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FileParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((FileParameterType) actualParameter.getType())
-						.setValue(element.getValue());
-			}
-		} else if ("URI".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(URIParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((URIParameterType) actualParameter.getType()).setValue(element
-						.getValue());
-			} else {
-				((URIParameterType) actualParameter.getType()).setValue("");
-			}
-
-		} else if ("StdOut".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StdOutParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((StdOutParameterType) actualParameter.getType())
-						.setValue(element.getValue());
-			} else {
-				((StdOutParameterType) actualParameter.getType()).setValue("");
-			}
-
-		} else if ("StdErr".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StdErrParameterType.type);
-			if (!"".equals(element.getValue())) {
-				((StdErrParameterType) actualParameter.getType())
-						.setValue(element.getValue());
-			} else {
-				((StdErrParameterType) actualParameter.getType()).setValue("");
-			}
-
-		}
-		return actualParameter;
-	}
-
-	public static ActualParameter getInputActualParameter(Parameter parameter,
-			OMElement element) {
-		OMElement innerelement = null;
-		ActualParameter actualParameter = new ActualParameter();
-		if ("String".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StringParameterType.type);
-			if (!"".equals(element.getText())) {
-				((StringParameterType) actualParameter.getType())
-						.setValue(element.getText());
-			} else if (element.getChildrenWithLocalName("value").hasNext()) {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((StringParameterType) actualParameter.getType())
-						.setValue(innerelement.getText());
-			} else {
-				((StringParameterType) actualParameter.getType()).setValue("");
-			}
-		} else if ("Double".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(DoubleParameterType.type);
-			if (!"".equals(element.getText())) {
-				((DoubleParameterType) actualParameter.getType())
-						.setValue(new Double(innerelement.getText()));
-			} else {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((DoubleParameterType) actualParameter.getType())
-						.setValue(new Double(innerelement.getText()));
-			}
-		} else if ("Integer".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(IntegerParameterType.type);
-			if (!"".equals(element.getText())) {
-				((IntegerParameterType) actualParameter.getType())
-						.setValue(new Integer(element.getText()));
-			} else {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((IntegerParameterType) actualParameter.getType())
-						.setValue(new Integer(innerelement.getText()));
-			}
-		} else if ("Float".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FloatParameterType.type);
-			if (!"".equals(element.getText())) {
-				((FloatParameterType) actualParameter.getType())
-						.setValue(new Float(element.getText()));
-			} else {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((FloatParameterType) actualParameter.getType())
-						.setValue(new Float(innerelement.getText()));
-			}
-		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(BooleanParameterType.type);
-			if (!"".equals(element.getText())) {
-				((BooleanParameterType) actualParameter.getType())
-						.setValue(new Boolean(element.getText()));
-			} else {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((BooleanParameterType) actualParameter.getType())
-						.setValue(Boolean.parseBoolean(innerelement.getText()));
-			}
-		} else if ("File".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FileParameterType.type);
-			if (!"".equals(element.getText())) {
-				((FileParameterType) actualParameter.getType())
-						.setValue(element.getText());
-			} else {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				((FileParameterType) actualParameter.getType())
-						.setValue(innerelement.getText());
-			}
-		} else if ("URI".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(URIParameterType.type);
-			if (!"".equals(element.getText())) {
-				((URIParameterType) actualParameter.getType()).setValue(element
-						.getText());
-			} else if (element.getChildrenWithLocalName("value").hasNext()) {
-				innerelement = (OMElement) element.getChildrenWithLocalName(
-						"value").next();
-				System.out.println(actualParameter.getType().toString());
-				log.debug(actualParameter.getType().toString());
-				((URIParameterType) actualParameter.getType())
-						.setValue(innerelement.getText());
-			} else {
-				((URIParameterType) actualParameter.getType()).setValue("");
-			}
-		} else if ("StringArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StringArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((StringArrayType) actualParameter.getType()).insertValue(
-							i++, arrayValue);
-				}
-			} else {
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((StringArrayType) actualParameter.getType()).insertValue(
-							i++, innerelement.getText());
-				}
-			}
-		} else if ("DoubleArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(DoubleArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((DoubleArrayType) actualParameter.getType()).insertValue(
-							i++, new Double(arrayValue));
-				}
-			} else {
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((DoubleArrayType) actualParameter.getType()).insertValue(
-							i++, new Double(innerelement.getText()));
-				}
-			}
-
-		} else if ("IntegerArray"
-				.equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(IntegerArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((IntegerArrayType) actualParameter.getType()).insertValue(
-							i++, new Integer(arrayValue));
-				}
-			} else {
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((IntegerArrayType) actualParameter.getType()).insertValue(
-							i++, new Integer(innerelement.getText()));
-				}
-			}
-		} else if ("FloatArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FloatArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((FloatArrayType) actualParameter.getType()).insertValue(
-							i++, new Float(arrayValue));
-				}
-			} else {
-
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((FloatArrayType) actualParameter.getType()).insertValue(
-							i++, new Float(innerelement.getText()));
-				}
-			}
-		} else if ("BooleanArray"
-				.equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(BooleanArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((BooleanArrayType) actualParameter.getType()).insertValue(
-							i++, new Boolean(arrayValue));
-				}
-			} else {
-
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((BooleanArrayType) actualParameter.getType()).insertValue(
-							i++, new Boolean(innerelement.getText()));
-				}
-			}
-		} else if ("FileArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FileArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((FileArrayType) actualParameter.getType()).insertValue(
-							i++, arrayValue);
-				}
-			} else {
-
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((FileArrayType) actualParameter.getType()).insertValue(
-							i++, innerelement.getText());
-				}
-			}
-		} else if ("URIArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(URIArrayType.type);
-			Iterator value = element.getChildrenWithLocalName("value");
-			int i = 0;
-			if (!"".equals(element.getText())) {
-				String[] list = StringUtil.getElementsFromString(element
-						.getText());
-				for (String arrayValue : list) {
-					((URIArrayType) actualParameter.getType()).insertValue(i++,
-							arrayValue);
-				}
-			} else {
+//	public static ActualParameter getInputActualParameter(Parameter parameter,
+//			DataObjectType element) {
+//		ActualParameter actualParameter = new ActualParameter();
+//		if ("String".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StringParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((StringParameterType) actualParameter.getType())
+//						.setValue(element.getValue());
+//			} else {
+//				((StringParameterType) actualParameter.getType()).setValue("");
+//			}
+//		} else if ("Double".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(DoubleParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((DoubleParameterType) actualParameter.getType())
+//						.setValue(new Double(element.getValue()));
+//			}
+//		} else if ("Integer".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(IntegerParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((IntegerParameterType) actualParameter.getType())
+//						.setValue(new Integer(element.getValue()));
+//			}
+//		} else if ("Float".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FloatParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((FloatParameterType) actualParameter.getType())
+//						.setValue(new Float(element.getValue()));
+//			}
+//		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(BooleanParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((BooleanParameterType) actualParameter.getType())
+//						.setValue(new Boolean(element.getValue()));
+//			}
+//		} else if ("File".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FileParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((FileParameterType) actualParameter.getType())
+//						.setValue(element.getValue());
+//			}
+//		} else if ("URI".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(URIParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((URIParameterType) actualParameter.getType()).setValue(element
+//						.getValue());
+//			} else {
+//				((URIParameterType) actualParameter.getType()).setValue("");
+//			}
+//
+//		} else if ("StdOut".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StdOutParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((StdOutParameterType) actualParameter.getType())
+//						.setValue(element.getValue());
+//			} else {
+//				((StdOutParameterType) actualParameter.getType()).setValue("");
+//			}
+//
+//		} else if ("StdErr".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StdErrParameterType.type);
+//			if (!"".equals(element.getValue())) {
+//				((StdErrParameterType) actualParameter.getType())
+//						.setValue(element.getValue());
+//			} else {
+//				((StdErrParameterType) actualParameter.getType()).setValue("");
+//			}
+//
+//		}
+//		return actualParameter;
+//	}
 
-				while (value.hasNext()) {
-					innerelement = (OMElement) value.next();
-					((URIArrayType) actualParameter.getType()).insertValue(i++,
-							innerelement.getText());
-				}
-			}
-		}
-		return actualParameter;
-	}
+//	public static ActualParameter getInputActualParameter(Parameter parameter,
+//			OMElement element) {
+//		OMElement innerelement = null;
+//		ActualParameter actualParameter = new ActualParameter();
+//		if ("String".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StringParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((StringParameterType) actualParameter.getType())
+//						.setValue(element.getText());
+//			} else if (element.getChildrenWithLocalName("value").hasNext()) {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((StringParameterType) actualParameter.getType())
+//						.setValue(innerelement.getText());
+//			} else {
+//				((StringParameterType) actualParameter.getType()).setValue("");
+//			}
+//		} else if ("Double".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(DoubleParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((DoubleParameterType) actualParameter.getType())
+//						.setValue(new Double(innerelement.getText()));
+//			} else {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((DoubleParameterType) actualParameter.getType())
+//						.setValue(new Double(innerelement.getText()));
+//			}
+//		} else if ("Integer".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(IntegerParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((IntegerParameterType) actualParameter.getType())
+//						.setValue(new Integer(element.getText()));
+//			} else {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((IntegerParameterType) actualParameter.getType())
+//						.setValue(new Integer(innerelement.getText()));
+//			}
+//		} else if ("Float".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FloatParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((FloatParameterType) actualParameter.getType())
+//						.setValue(new Float(element.getText()));
+//			} else {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((FloatParameterType) actualParameter.getType())
+//						.setValue(new Float(innerelement.getText()));
+//			}
+//		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(BooleanParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((BooleanParameterType) actualParameter.getType())
+//						.setValue(new Boolean(element.getText()));
+//			} else {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((BooleanParameterType) actualParameter.getType())
+//						.setValue(Boolean.parseBoolean(innerelement.getText()));
+//			}
+//		} else if ("File".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FileParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((FileParameterType) actualParameter.getType())
+//						.setValue(element.getText());
+//			} else {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				((FileParameterType) actualParameter.getType())
+//						.setValue(innerelement.getText());
+//			}
+//		} else if ("URI".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(URIParameterType.type);
+//			if (!"".equals(element.getText())) {
+//				((URIParameterType) actualParameter.getType()).setValue(element
+//						.getText());
+//			} else if (element.getChildrenWithLocalName("value").hasNext()) {
+//				innerelement = (OMElement) element.getChildrenWithLocalName(
+//						"value").next();
+//				System.out.println(actualParameter.getType().toString());
+//				log.debug(actualParameter.getType().toString());
+//				((URIParameterType) actualParameter.getType())
+//						.setValue(innerelement.getText());
+//			} else {
+//				((URIParameterType) actualParameter.getType()).setValue("");
+//			}
+//		} else if ("StringArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StringArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((StringArrayType) actualParameter.getType()).insertValue(
+//							i++, arrayValue);
+//				}
+//			} else {
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((StringArrayType) actualParameter.getType()).insertValue(
+//							i++, innerelement.getText());
+//				}
+//			}
+//		} else if ("DoubleArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(DoubleArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((DoubleArrayType) actualParameter.getType()).insertValue(
+//							i++, new Double(arrayValue));
+//				}
+//			} else {
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((DoubleArrayType) actualParameter.getType()).insertValue(
+//							i++, new Double(innerelement.getText()));
+//				}
+//			}
+//
+//		} else if ("IntegerArray"
+//				.equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(IntegerArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((IntegerArrayType) actualParameter.getType()).insertValue(
+//							i++, new Integer(arrayValue));
+//				}
+//			} else {
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((IntegerArrayType) actualParameter.getType()).insertValue(
+//							i++, new Integer(innerelement.getText()));
+//				}
+//			}
+//		} else if ("FloatArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FloatArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((FloatArrayType) actualParameter.getType()).insertValue(
+//							i++, new Float(arrayValue));
+//				}
+//			} else {
+//
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((FloatArrayType) actualParameter.getType()).insertValue(
+//							i++, new Float(innerelement.getText()));
+//				}
+//			}
+//		} else if ("BooleanArray"
+//				.equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(BooleanArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((BooleanArrayType) actualParameter.getType()).insertValue(
+//							i++, new Boolean(arrayValue));
+//				}
+//			} else {
+//
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((BooleanArrayType) actualParameter.getType()).insertValue(
+//							i++, new Boolean(innerelement.getText()));
+//				}
+//			}
+//		} else if ("FileArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FileArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((FileArrayType) actualParameter.getType()).insertValue(
+//							i++, arrayValue);
+//				}
+//			} else {
+//
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((FileArrayType) actualParameter.getType()).insertValue(
+//							i++, innerelement.getText());
+//				}
+//			}
+//		} else if ("URIArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(URIArrayType.type);
+//			Iterator value = element.getChildrenWithLocalName("value");
+//			int i = 0;
+//			if (!"".equals(element.getText())) {
+//				String[] list = StringUtil.getElementsFromString(element
+//						.getText());
+//				for (String arrayValue : list) {
+//					((URIArrayType) actualParameter.getType()).insertValue(i++,
+//							arrayValue);
+//				}
+//			} else {
+//
+//				while (value.hasNext()) {
+//					innerelement = (OMElement) value.next();
+//					((URIArrayType) actualParameter.getType()).insertValue(i++,
+//							innerelement.getText());
+//				}
+//			}
+//		}
+//		return actualParameter;
+//	}
 
-	public static ActualParameter getInputActualParameter(Parameter parameter,
-			String inputVal) throws GFacException {
-		OMElement innerelement = null;
-		ActualParameter actualParameter = new ActualParameter();
-		if ("String".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StringParameterType.type);
-			((StringParameterType) actualParameter.getType())
-					.setValue(inputVal);
-		} else if ("Double".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(DoubleParameterType.type);
-			((DoubleParameterType) actualParameter.getType())
-					.setValue(new Double(inputVal));
-		} else if ("Integer".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(IntegerParameterType.type);
-			((IntegerParameterType) actualParameter.getType())
-					.setValue(new Integer(inputVal));
-		} else if ("Float".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FloatParameterType.type);
-			((FloatParameterType) actualParameter.getType())
-					.setValue(new Float(inputVal));
-		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(BooleanParameterType.type);
-			((BooleanParameterType) actualParameter.getType())
-					.setValue(new Boolean(inputVal));
-		} else if ("File".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FileParameterType.type);
-			((FileParameterType) actualParameter.getType()).setValue(inputVal);
-		} else if ("URI".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(URIParameterType.type);
-			((URIParameterType) actualParameter.getType()).setValue(inputVal);
-		} else if ("StringArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(StringArrayType.type);
-			Iterator iterator = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (iterator.hasNext()) {
-				innerelement = (OMElement) iterator.next();
-				((StringArrayType) actualParameter.getType()).insertValue(i++,
-						innerelement.getText());
-			}
-		} else if ("DoubleArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(DoubleArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((DoubleArrayType) actualParameter.getType()).insertValue(i++,
-						new Double(innerelement.getText()));
-			}
-		} else if ("IntegerArray"
-				.equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(IntegerArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((IntegerArrayType) actualParameter.getType()).insertValue(i++,
-						new Integer(innerelement.getText()));
-			}
-		} else if ("FloatArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FloatArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((FloatArrayType) actualParameter.getType()).insertValue(i++,
-						new Float(innerelement.getText()));
-			}
-		} else if ("BooleanArray"
-				.equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(BooleanArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((BooleanArrayType) actualParameter.getType()).insertValue(i++,
-						new Boolean(innerelement.getText()));
-			}
-		} else if ("FileArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(FileArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((FileArrayType) actualParameter.getType()).insertValue(i++,
-						innerelement.getText());
-			}
-		} else if ("URIArray".equals(parameter.getParameterType().getName())) {
-			actualParameter = new ActualParameter(URIArrayType.type);
-			Iterator value = Arrays.asList(
-					StringUtil.getElementsFromString(inputVal)).iterator();
-			int i = 0;
-			while (value.hasNext()) {
-				innerelement = (OMElement) value.next();
-				((URIArrayType) actualParameter.getType()).insertValue(i++,
-						innerelement.getText());
-			}
-		} else {
-			throw new GFacException(
-					"Input parameters are not configured properly ");
-		}
-		return actualParameter;
-	}
+//	public static ActualParameter getInputActualParameter(Parameter parameter,
+//			String inputVal) throws GFacException {
+//		OMElement innerelement = null;
+//		ActualParameter actualParameter = new ActualParameter();
+//		if ("String".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StringParameterType.type);
+//			((StringParameterType) actualParameter.getType())
+//					.setValue(inputVal);
+//		} else if ("Double".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(DoubleParameterType.type);
+//			((DoubleParameterType) actualParameter.getType())
+//					.setValue(new Double(inputVal));
+//		} else if ("Integer".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(IntegerParameterType.type);
+//			((IntegerParameterType) actualParameter.getType())
+//					.setValue(new Integer(inputVal));
+//		} else if ("Float".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FloatParameterType.type);
+//			((FloatParameterType) actualParameter.getType())
+//					.setValue(new Float(inputVal));
+//		} else if ("Boolean".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(BooleanParameterType.type);
+//			((BooleanParameterType) actualParameter.getType())
+//					.setValue(new Boolean(inputVal));
+//		} else if ("File".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FileParameterType.type);
+//			((FileParameterType) actualParameter.getType()).setValue(inputVal);
+//		} else if ("URI".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(URIParameterType.type);
+//			((URIParameterType) actualParameter.getType()).setValue(inputVal);
+//		} else if ("StringArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(StringArrayType.type);
+//			Iterator iterator = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (iterator.hasNext()) {
+//				innerelement = (OMElement) iterator.next();
+//				((StringArrayType) actualParameter.getType()).insertValue(i++,
+//						innerelement.getText());
+//			}
+//		} else if ("DoubleArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(DoubleArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((DoubleArrayType) actualParameter.getType()).insertValue(i++,
+//						new Double(innerelement.getText()));
+//			}
+//		} else if ("IntegerArray"
+//				.equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(IntegerArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((IntegerArrayType) actualParameter.getType()).insertValue(i++,
+//						new Integer(innerelement.getText()));
+//			}
+//		} else if ("FloatArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FloatArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((FloatArrayType) actualParameter.getType()).insertValue(i++,
+//						new Float(innerelement.getText()));
+//			}
+//		} else if ("BooleanArray"
+//				.equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(BooleanArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((BooleanArrayType) actualParameter.getType()).insertValue(i++,
+//						new Boolean(innerelement.getText()));
+//			}
+//		} else if ("FileArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(FileArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((FileArrayType) actualParameter.getType()).insertValue(i++,
+//						innerelement.getText());
+//			}
+//		} else if ("URIArray".equals(parameter.getParameterType().getName())) {
+//			actualParameter = new ActualParameter(URIArrayType.type);
+//			Iterator value = Arrays.asList(
+//					StringUtil.getElementsFromString(inputVal)).iterator();
+//			int i = 0;
+//			while (value.hasNext()) {
+//				innerelement = (OMElement) value.next();
+//				((URIArrayType) actualParameter.getType()).insertValue(i++,
+//						innerelement.getText());
+//			}
+//		} else {
+//			throw new GFacException(
+//					"Input parameters are not configured properly ");
+//		}
+//		return actualParameter;
+//	}
 
 //	public static ApplicationJob createApplicationJob(
 //			JobExecutionContext jobExecutionContext) {
@@ -811,89 +856,89 @@ public class GFacUtils {
 		}
 	}
 
-	public static Map<String, Object> getInMessageContext(
-			List<DataObjectType> experimentData, Parameter[] parameters)
-			throws GFacException {
-		HashMap<String, Object> stringObjectHashMap = new HashMap<String, Object>();
-		Map<String, DataObjectType> map = new HashMap<String, DataObjectType>();
-		for (DataObjectType objectType : experimentData) {
-			map.put(objectType.getKey(), objectType);
-		}
-		for (int i = 0; i < parameters.length; i++) {
-			DataObjectType input = map.get(parameters[i].getParameterName());
-			if (input != null) {
-				DataType t = DataType.STRING;
-				String type = parameters[i].getParameterType().getType().toString().toUpperCase();
-				if (type.equals("STRING")){
-					t=DataType.STRING;
-				}else if (type.equals("INTEGER")){
-					t=DataType.INTEGER;
-				}else if (type.equals("FLOAT")){
-					//FIXME
-					t=DataType.INTEGER;
-				}else if (type.equals("URI")){
-					t=DataType.URI;
-				}
-				input.setType(t);
-				stringObjectHashMap
-						.put(parameters[i].getParameterName(), GFacUtils
-								.getInputActualParameter(parameters[i], input));
-			} else {
-				throw new GFacException(
-						"Error finding the parameter: parameter Name"
-								+ parameters[i].getParameterName());
-			}
-		}
-		return stringObjectHashMap;
-	}
+//	public static Map<String, Object> getInMessageContext(
+//			List<DataObjectType> experimentData, Parameter[] parameters)
+//			throws GFacException {
+//		HashMap<String, Object> stringObjectHashMap = new HashMap<String, Object>();
+//		Map<String, DataObjectType> map = new HashMap<String, DataObjectType>();
+//		for (DataObjectType objectType : experimentData) {
+//			map.put(objectType.getKey(), objectType);
+//		}
+//		for (int i = 0; i < parameters.length; i++) {
+//			DataObjectType input = map.get(parameters[i].getParameterName());
+//			if (input != null) {
+//				DataType t = DataType.STRING;
+//				String type = parameters[i].getParameterType().getType().toString().toUpperCase();
+//				if (type.equals("STRING")){
+//					t=DataType.STRING;
+//				}else if (type.equals("INTEGER")){
+//					t=DataType.INTEGER;
+//				}else if (type.equals("FLOAT")){
+//					//FIXME
+//					t=DataType.INTEGER;
+//				}else if (type.equals("URI")){
+//					t=DataType.URI;
+//				}
+//				input.setType(t);
+//				stringObjectHashMap
+//						.put(parameters[i].getParameterName(), GFacUtils
+//								.getInputActualParameter(parameters[i], input));
+//			} else {
+//				throw new GFacException(
+//						"Error finding the parameter: parameter Name"
+//								+ parameters[i].getParameterName());
+//			}
+//		}
+//		return stringObjectHashMap;
+//	}
 
-    public static Map<String, Object> getInMessageContext(List<DataObjectType> experimentData) throws GFacException {
+    public static Map<String, Object> getInputParamMap(List<InputDataObjectType> experimentData) throws GFacException {
         Map<String, Object> map = new HashMap<String, Object>();
-        for (DataObjectType objectType : experimentData) {
-            map.put(objectType.getKey(), objectType);
+        for (InputDataObjectType objectType : experimentData) {
+            map.put(objectType.getName(), objectType);
         }
         return map;
     }
+//
+//	public static Map<String, Object> getOutMessageContext(
+//			List<DataObjectType> experimentData, Parameter[] parameters)
+//			throws GFacException {
+//		HashMap<String, Object> stringObjectHashMap = new HashMap<String, Object>();
+//		Map<String, DataObjectType> map = new HashMap<String, DataObjectType>();
+//		for (DataObjectType objectType : experimentData) {
+//			map.put(objectType.getKey(), objectType);
+//		}
+//		for (int i = 0; i < parameters.length; i++) {
+//			DataObjectType output = map.get(parameters[i].getParameterName());
+//			if (output==null){
+//				output=new DataObjectType();
+//				output.setKey(parameters[i].getParameterName());
+//				output.setValue("");
+//				String type = parameters[i].getParameterType().getType().toString().toUpperCase();
+//				DataType t = DataType.STRING;
+//				if (type.equals("STRING")){
+//					t=DataType.STRING;
+//				}else if (type.equals("INTEGER")){
+//					t=DataType.INTEGER;
+//				}else if (type.equals("FLOAT")){
+//					//FIXME
+//					t=DataType.INTEGER;
+//				}else if (type.equals("URI")){
+//					t=DataType.URI;
+//				}
+//				output.setType(t);
+//			}
+//			stringObjectHashMap
+//					.put(parameters[i].getParameterName(), GFacUtils
+//							.getInputActualParameter(parameters[i], output));
+//		}
+//		return stringObjectHashMap;
+//	}
 
-	public static Map<String, Object> getOutMessageContext(
-			List<DataObjectType> experimentData, Parameter[] parameters)
-			throws GFacException {
-		HashMap<String, Object> stringObjectHashMap = new HashMap<String, Object>();
-		Map<String, DataObjectType> map = new HashMap<String, DataObjectType>();
-		for (DataObjectType objectType : experimentData) {
-			map.put(objectType.getKey(), objectType);
-		}
-		for (int i = 0; i < parameters.length; i++) {
-			DataObjectType output = map.get(parameters[i].getParameterName());
-			if (output==null){
-				output=new DataObjectType();
-				output.setKey(parameters[i].getParameterName());
-				output.setValue("");
-				String type = parameters[i].getParameterType().getType().toString().toUpperCase();
-				DataType t = DataType.STRING;
-				if (type.equals("STRING")){
-					t=DataType.STRING;
-				}else if (type.equals("INTEGER")){
-					t=DataType.INTEGER;
-				}else if (type.equals("FLOAT")){
-					//FIXME
-					t=DataType.INTEGER;
-				}else if (type.equals("URI")){
-					t=DataType.URI;
-				}
-				output.setType(t);
-			}
-			stringObjectHashMap
-					.put(parameters[i].getParameterName(), GFacUtils
-							.getInputActualParameter(parameters[i], output));
-		}
-		return stringObjectHashMap;
-	}
-
-    public static Map<String, Object> getOutMessageContext(List<DataObjectType> experimentData) throws GFacException {
+    public static Map<String, Object> getOuputParamMap(List<OutputDataObjectType> experimentData) throws GFacException {
         Map<String, Object> map = new HashMap<String, Object>();
-        for (DataObjectType objectType : experimentData) {
-            map.put(objectType.getKey(), objectType);
+        for (OutputDataObjectType objectType : experimentData) {
+            map.put(objectType.getName(), objectType);
         }
         return map;
     }
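
For orientation, here is a minimal usage sketch for the renamed helpers above (getInputParamMap and getOuputParamMap now key parameters by name rather than by "key"). It is illustrative only and not part of the patch: the example class, the literal values, and the Thrift-style setters setName/setValue/setType are assumptions; the diff itself only exercises the corresponding getters.

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    import org.apache.airavata.gfac.GFacException;
    import org.apache.airavata.gfac.core.utils.GFacUtils;
    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;

    public class InputParamMapExample {
        public static void main(String[] args) throws GFacException {
            // Assumed Thrift-style setters; the patch above only shows the getters.
            InputDataObjectType input = new InputDataObjectType();
            input.setName("echo_input");
            input.setValue("Hello World");
            input.setType(DataType.STRING);

            List<InputDataObjectType> experimentInputs = Arrays.asList(input);
            // Builds the name-keyed map that handlers read from the in-message context.
            Map<String, Object> inParams = GFacUtils.getInputParamMap(experimentInputs);
            System.out.println(inParams.keySet());   // prints [echo_input]
        }
    }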

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/OutputUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/OutputUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/OutputUtils.java
index 4008721..574fab8 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/OutputUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/OutputUtils.java
@@ -20,68 +20,64 @@
 */
 package org.apache.airavata.gfac.core.utils;
 
+import org.apache.airavata.common.utils.StringUtil;
+import org.apache.airavata.gfac.core.handler.GFacHandlerException;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
-import org.apache.airavata.schemas.gfac.StdErrParameterType;
-import org.apache.airavata.schemas.gfac.StdOutParameterType;
-
 public class OutputUtils {
     private static String regexPattern = "\\s*=\\s*(.*)\\r?\\n";
 
-	public static void fillOutputFromStdout(Map<String, Object> output, String stdout, String stderr, List<DataObjectType> outputArray) throws Exception {
+	public static void fillOutputFromStdout(Map<String, Object> output, String stdout, String stderr, List<OutputDataObjectType> outputArray) throws Exception {
 
 		if (stdout == null || stdout.equals("")) {
 			throw new GFacHandlerException("Standard output is empty.");
 		}
 
 		Set<String> keys = output.keySet();
+        OutputDataObjectType actual = null;
+        OutputDataObjectType resultOutput = null;
 		for (String paramName : keys) {
-			ActualParameter actual = (ActualParameter) output.get(paramName);
+			actual = (OutputDataObjectType) output.get(paramName);
 			// if parameter value is not already set, we let it go
 
 			if (actual == null) {
 				continue;
 			}
-			if ("StdOut".equals(actual.getType().getType().toString())) {
-		        ((StdOutParameterType) actual.getType()).setValue(stdout);
-		        DataObjectType out = new DataObjectType();
-				out.setKey(paramName);
-				out.setType(DataType.STDOUT);
-				out.setValue(stdout);
-				outputArray.add(out);
-			} else if ("StdErr".equals(actual.getType().getType().toString())) {
-				((StdErrParameterType) actual.getType()).setValue(stderr);
-		        DataObjectType out = new DataObjectType();
-				out.setKey(paramName);
-				out.setType(DataType.STDERR);
-				out.setValue(stderr);
-				outputArray.add(out);
-			}
+            resultOutput = new OutputDataObjectType();
+            if (DataType.STDOUT == actual.getType()) {
+                actual.setValue(stdout);
+                resultOutput.setName(paramName);
+                resultOutput.setType(DataType.STDOUT);
+                resultOutput.setValue(stdout);
+                outputArray.add(resultOutput);
+			} else if (DataType.STDERR == actual.getType()) {
+                actual.setValue(stderr);
+                resultOutput.setName(paramName);
+                resultOutput.setType(DataType.STDERR);
+                resultOutput.setValue(stderr);
+                outputArray.add(resultOutput);
+            }
 //			else if ("URI".equals(actual.getType().getType().toString())) {
 //				continue;
 //			} 
-			else {
-				String parseStdout = parseStdout(stdout, paramName);
-				if (parseStdout != null) {
-					DataObjectType out = new DataObjectType();
-					out.setKey(paramName);
-					out.setType(DataType.STRING);
-					out.setValue(parseStdout);
-					outputArray.add(out);
-					MappingFactory.fromString(actual, parseStdout);
-	          }
-			}
-		}
+            else {
+                String parseStdout = parseStdout(stdout, paramName);
+                if (parseStdout != null) {
+                    actual.setValue(parseStdout);
+                    resultOutput.setName(paramName);
+                    resultOutput.setType(DataType.STRING);
+                    resultOutput.setValue(parseStdout);
+                    outputArray.add(resultOutput);
+                }
+            }
+        }
 	}
 
     private static String parseStdout(String stdout, String outParam) throws Exception {

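A minimal, self-contained sketch of calling the reworked fillOutputFromStdout follows. The parameter name "job_stdout" and the sample stdout text are invented for illustration; when the declared output is typed DataType.STDOUT, the method copies the whole stdout stream into a new OutputDataObjectType and appends it to the result list.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.airavata.gfac.core.utils.OutputUtils;
    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

    public class StdoutParseExample {
        public static void main(String[] args) throws Exception {
            // Declare one output parameter of type STDOUT (names/values are placeholders).
            OutputDataObjectType stdoutParam = new OutputDataObjectType();
            stdoutParam.setName("job_stdout");
            stdoutParam.setType(DataType.STDOUT);

            Map<String, Object> declaredOutputs = new HashMap<String, Object>();
            declaredOutputs.put("job_stdout", stdoutParam);

            List<OutputDataObjectType> results = new ArrayList<OutputDataObjectType>();
            OutputUtils.fillOutputFromStdout(declaredOutputs, "wall_time = 42\n", "", results);

            // results now holds one STDOUT entry carrying the full stdout text.
            System.out.println(results.get(0).getValue());
        }
    }
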
http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
index 7e226ea..850608f 100644
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
@@ -47,6 +47,7 @@ import org.apache.airavata.gfac.core.utils.OutputUtils;
 import org.apache.airavata.gfac.gram.security.GSISecurityContext;
 import org.apache.airavata.gfac.gram.external.GridFtp;
 import org.apache.airavata.gfac.gram.util.GramProviderUtils;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
@@ -157,7 +158,7 @@ public class GridFTPOutputHandler extends AbstractHandler {
                         log.error("Cannot download stdout/err files. One reason could be the job is not successfully finished:  "+e.getMessage());
                     }
 
-                    List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
+                    List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
                     Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
                     Set<String> keys = output.keySet();
                     for (String paramName : keys) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index d9e669d..39f8df1 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@ -30,10 +30,10 @@ import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.TransferState;
 import org.apache.airavata.model.workspace.experiment.TransferStatus;
@@ -113,8 +113,8 @@ public class GSISSHInputHandler extends AbstractRecoverableHandler {
             MessageContext input = jobExecutionContext.getInMessageContext();
             Set<String> parameters = input.getParameters().keySet();
             for (String paramName : parameters) {
-                DataObjectType inputParamType = (DataObjectType) input.getParameters().get(paramName);
-                String paramValue = inputParamType.getKey();
+                InputDataObjectType inputParamType = (InputDataObjectType) input.getParameters().get(paramName);
+                String paramValue = inputParamType.getName();
                 //TODO: Review this with type
                 if (inputParamType.getType() == DataType.URI) {
                     if (index < oldIndex) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
index a714099..a25e66a 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
@@ -20,23 +20,10 @@
 */
 package org.apache.airavata.gfac.gsissh.handler;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-import net.schmizz.sshj.connection.ConnectionException;
-import net.schmizz.sshj.transport.TransportException;
-
-import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.AbstractRecoverableHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
@@ -45,24 +32,26 @@ import org.apache.airavata.gfac.core.utils.OutputUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
-import org.apache.airavata.model.messaging.event.TaskIdentifier;
-import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
-import org.apache.airavata.model.workspace.experiment.*;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.registry.cpi.RegistryModelType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.xmlbeans.XmlException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
 public class GSISSHOutputHandler extends AbstractRecoverableHandler {
     private static final Logger log = LoggerFactory.getLogger(GSISSHOutputHandler.class);
 
@@ -182,12 +171,12 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
             registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
 
             //todo this is a mess we have to fix this
-            List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
+            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
             Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             Set<String> keys = output.keySet();
             for (String paramName : keys) {
-                ActualParameter actualParameter = (ActualParameter) output.get(paramName);
-                if ("URI".equals(actualParameter.getType().getType().toString())) {
+                OutputDataObjectType outputDataObjectType = (OutputDataObjectType) output.get(paramName);
+                if (DataType.URI == outputDataObjectType.getType()) {
 
                     List<String> outputList = null;
                     int retry=3;
@@ -210,9 +199,9 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                         Set<String> strings = output.keySet();
                         outputArray.clear();
                         for (String key : strings) {
-                            ActualParameter actualParameter1 = (ActualParameter) output.get(key);
-                            if ("URI".equals(actualParameter1.getType().getType().toString())) {
-                                String downloadFile = MappingFactory.toString(actualParameter1);
+                            OutputDataObjectType outputDataObjectType1 = (OutputDataObjectType) output.get(key);
+                            if (DataType.URI == outputDataObjectType1.getType()) {
+                                String downloadFile = outputDataObjectType1.getValue();
                                 String localFile;
                                 if (index < oldIndex) {
                                     localFile = oldFiles.get(index);
@@ -225,10 +214,10 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                                     GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
                                 }
                                 jobExecutionContext.addOutputFile(localFile);
-                                MappingFactory.fromString(actualParameter1, localFile);
-                                DataObjectType dataObjectType = new DataObjectType();
+                                outputDataObjectType1.setValue(localFile);
+                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
                                 dataObjectType.setValue(localFile);
-                                dataObjectType.setKey(key);
+                                dataObjectType.setName(key);
                                 dataObjectType.setType(DataType.URI);
                                 outputArray.add(dataObjectType);
                             }
@@ -248,10 +237,10 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                             GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
                         }
                         jobExecutionContext.addOutputFile(outputFile);
-                        MappingFactory.fromString(actualParameter, outputFile);
-                        DataObjectType dataObjectType = new DataObjectType();
+                        outputDataObjectType.setValue(outputFile);
+                        OutputDataObjectType dataObjectType  = new OutputDataObjectType();
                         dataObjectType.setValue(valueList);
-                        dataObjectType.setKey(paramName);
+                        dataObjectType.setName(paramName);
                         dataObjectType.setType(DataType.URI);
                         outputArray.add(dataObjectType);
                     }

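The GSISSH handler above and the SSH and SCP handlers below share the same migration idiom: setKey becomes setName, string comparisons against "URI" become DataType enum checks, and MappingFactory.fromString is replaced by a plain setValue. A standalone rendering of that idiom, with placeholder names and paths, might look like this (illustrative only, not code from the commit):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

    public class UriOutputExample {
        public static void main(String[] args) {
            // Declared output parameter; "output_file" and the paths are placeholders.
            OutputDataObjectType declared = new OutputDataObjectType();
            declared.setName("output_file");           // setName replaces the old setKey
            declared.setType(DataType.URI);
            declared.setValue("result.dat");

            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
            if (DataType.URI == declared.getType()) {  // enum check replaces "URI".equals(...)
                String localFile = "/tmp/outputData/result.dat";
                declared.setValue(localFile);          // replaces MappingFactory.fromString(...)

                OutputDataObjectType downloaded = new OutputDataObjectType();
                downloaded.setName(declared.getName());
                downloaded.setType(DataType.URI);
                downloaded.setValue(localFile);
                outputArray.add(downloaded);
            }
            System.out.println(outputArray.get(0).getValue());
        }
    }
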
http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
index 4cdd0c0..e28726d 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
@@ -39,18 +39,16 @@ import org.apache.airavata.gfac.local.utils.InputStreamToFileWriter;
 import org.apache.airavata.gfac.local.utils.InputUtils;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
 import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
 import org.apache.airavata.registry.cpi.ChildDataType;
 import org.apache.airavata.registry.cpi.RegistryModelType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
 import org.apache.xmlbeans.XmlException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -219,7 +217,7 @@ public class LocalProvider extends AbstractProvider {
 
     public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         try {
-        	List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
+        	List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
             String stdOutStr = GFacUtils.readFileToString(jobExecutionContext.getStandardOutput());
             String stdErrStr = GFacUtils.readFileToString(jobExecutionContext.getStandardError());
 			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index f34b82a..e46dfa5 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -28,21 +28,18 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
 import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
 import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -147,7 +144,7 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
             }
             pbsCluster.scpTo(outputPath, standardError);
             pbsCluster.scpTo(outputPath, standardOutput);
-            List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
+            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
             Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             Set<String> keys = output.keySet();
             for (String paramName : keys) {
@@ -160,9 +157,9 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                 	}
                 	pbsCluster.scpTo(outputPath, downloadFile);
                     String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-                    DataObjectType dataObjectType = new DataObjectType();
+                    OutputDataObjectType dataObjectType = new OutputDataObjectType();
                     dataObjectType.setValue(outputPath + File.separatorChar + fileName);
-                    dataObjectType.setKey(paramName);
+                    dataObjectType.setName(paramName);
                     dataObjectType.setType(DataType.URI);
                     outputArray.add(dataObjectType);
                 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index d80e92b..94f667e 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@ -20,20 +20,6 @@
 */
 package org.apache.airavata.gfac.ssh.handler;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-import net.schmizz.sshj.connection.ConnectionException;
-import net.schmizz.sshj.transport.TransportException;
-
-import org.airavata.appcatalog.cpi.AppCatalog;
-import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
-import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.Constants;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
@@ -44,21 +30,25 @@ import org.apache.airavata.gfac.core.utils.OutputUtils;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
 import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
-import org.apache.airavata.model.workspace.experiment.*;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.registry.cpi.RegistryModelType;
-import org.apache.airavata.registry.cpi.RegistryException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.xmlbeans.XmlException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
 public class SSHOutputHandler extends AbstractHandler {
     private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
 
@@ -141,12 +131,12 @@ public class SSHOutputHandler extends AbstractHandler {
             registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
 
 
-            List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
+            List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
             Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             Set<String> keys = output.keySet();
             for (String paramName : keys) {
-                ActualParameter actualParameter = (ActualParameter) output.get(paramName);
-                if ("URI".equals(actualParameter.getType().getType().toString())) {
+                OutputDataObjectType actualParameter = (OutputDataObjectType) output.get(paramName);
+                if (DataType.URI == actualParameter.getType()) {
                     List<String> outputList = null;
                     int retry = 3;
                     while (retry > 0) {
@@ -163,17 +153,17 @@ public class SSHOutputHandler extends AbstractHandler {
                         Set<String> strings = output.keySet();
                         outputArray.clear();
                         for (String key : strings) {
-                            ActualParameter actualParameter1 = (ActualParameter) output.get(key);
-                            if ("URI".equals(actualParameter1.getType().getType().toString())) {
-                                String downloadFile = MappingFactory.toString(actualParameter1);
+                            OutputDataObjectType actualParameter1 = (OutputDataObjectType) output.get(key);
+                            if (DataType.URI == actualParameter1.getType()) {
+                                String downloadFile = actualParameter1.getValue();
                                 cluster.scpFrom(downloadFile, outputDataDir);
                                 String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
                                 String localFile = outputDataDir + File.separator + fileName;
                                 jobExecutionContext.addOutputFile(localFile);
-                                MappingFactory.fromString(actualParameter1, localFile);
-                                DataObjectType dataObjectType = new DataObjectType();
+                                actualParameter1.setValue(localFile);
+                                OutputDataObjectType dataObjectType = new OutputDataObjectType();
                                 dataObjectType.setValue(localFile);
-                                dataObjectType.setKey(key);
+                                dataObjectType.setName(key);
                                 dataObjectType.setType(DataType.URI);
                                 outputArray.add(dataObjectType);
                             }
@@ -185,10 +175,10 @@ public class SSHOutputHandler extends AbstractHandler {
                         cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                         String outputPath = outputDataDir + File.separator + valueList;
                         jobExecutionContext.addOutputFile(outputPath);
-                        MappingFactory.fromString(actualParameter, outputPath);
-                        DataObjectType dataObjectType = new DataObjectType();
+                        actualParameter.setValue(outputPath);
+                        OutputDataObjectType dataObjectType = new OutputDataObjectType();
                         dataObjectType.setValue(outputPath);
-                        dataObjectType.setKey(paramName);
+                        dataObjectType.setName(paramName);
                         dataObjectType.setType(DataType.URI);
                         outputArray.add(dataObjectType);
                     }


[03/50] [abbrv] airavata git commit: adding BES provider changes

Posted by ch...@apache.org.
adding BES provider changes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/04f09e7d
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/04f09e7d
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/04f09e7d

Branch: refs/heads/master
Commit: 04f09e7d424ee94d514c6423cd5f524eb7d53181
Parents: f29dfbe
Author: chathuriw <ka...@gmail.com>
Authored: Fri Oct 31 14:40:50 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Fri Oct 31 14:40:50 2014 -0400

----------------------------------------------------------------------
 .../gfac/bes/handlers/AbstractSMSHandler.java   |  74 ++--
 .../gfac/bes/provider/impl/BESProvider.java     | 378 +++++++++----------
 .../bes/security/UNICORESecurityContext.java    |   4 +-
 .../gfac/bes/utils/ApplicationProcessor.java    | 212 ++++-------
 .../airavata/gfac/core/utils/GFacUtils.java     |  23 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |  15 +-
 6 files changed, 306 insertions(+), 400 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
index 8f6fcf4..71ca0db 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
@@ -2,6 +2,7 @@ package org.apache.airavata.gfac.bes.handlers;
 
 import java.util.Properties;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.bes.security.UNICORESecurityContext;
 import org.apache.airavata.gfac.bes.security.X509SecurityContext;
@@ -13,6 +14,7 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.schemas.gfac.JobDirectoryModeDocument.JobDirectoryMode;
@@ -43,42 +45,42 @@ public abstract class AbstractSMSHandler implements BESConstants, GFacHandler{
 	@Override
 	public void invoke(JobExecutionContext jobExecutionContext)
 			throws GFacHandlerException {
-		
-		// if not SMS then not to pass further
-//		if(!isSMSEnabled(jobExecutionContext)) return;
-		
-		initSecurityProperties(jobExecutionContext);
-		
+		try {
+            initSecurityProperties(jobExecutionContext);
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                    UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                    factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
+            storageClient = null;
 
-		
-		UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
-                .getType();
-        String factoryUrl = host.getUnicoreBESEndPointArray()[0];
-        
-        storageClient = null;
-        
-        if(!isSMSInstanceExisting(jobExecutionContext)) {
-            EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
-            eprt.addNewAddress().setStringValue(factoryUrl);
-            StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
-            try {
-                storageClient = storageCreator.createStorage();
-            } catch (Exception e2) {
-                log.error("Cannot create storage..");
-                throw new GFacHandlerException("Cannot create storage..", e2);
+            if (!isSMSInstanceExisting(jobExecutionContext)) {
+                EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
+                eprt.addNewAddress().setStringValue(factoryUrl);
+                StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
+                try {
+                    storageClient = storageCreator.createStorage();
+                } catch (Exception e2) {
+                    log.error("Cannot create storage..");
+                    throw new GFacHandlerException("Cannot create storage..", e2);
+                }
+                jobExecutionContext.setProperty(PROP_SMS_EPR, storageClient.getEPR());
+            } else {
+                EndpointReferenceType eprt = (EndpointReferenceType) jobExecutionContext.getProperty(PROP_SMS_EPR);
+                try {
+                    storageClient = new StorageClient(eprt, secProperties);
+                } catch (Exception e) {
+                    throw new GFacHandlerException("Cannot create storage..", e);
+                }
             }
-            jobExecutionContext.setProperty(PROP_SMS_EPR, storageClient.getEPR());
-        }
-        else {
-        	EndpointReferenceType eprt = (EndpointReferenceType)jobExecutionContext.getProperty(PROP_SMS_EPR);
-        		try {
-					storageClient = new StorageClient(eprt, secProperties);
-				} catch (Exception e) {
-					throw new GFacHandlerException("Cannot create storage..", e);
-				}
+            dataTransferrer = new DataTransferrer(jobExecutionContext, storageClient);
+        } catch (AppCatalogException e) {
+            throw new GFacHandlerException("Error occurred while retrieving unicore job submission interface..", e);
         }
-        dataTransferrer = new DataTransferrer(jobExecutionContext, storageClient);
-	}
+    }
 	
 	protected void initSecurityProperties(JobExecutionContext jobExecutionContext) throws GFacHandlerException{
 		log.debug("Initializing SMSInHandler security properties ..");
@@ -136,9 +138,9 @@ public abstract class AbstractSMSHandler implements BESConstants, GFacHandler{
 	 * of the job execution context.
 	 * */
 	protected boolean isSMSEnabled(JobExecutionContext jobExecutionContext){
-		if(((UnicoreHostType)jobExecutionContext.getApplicationContext().getHostDescription().getType()).getJobDirectoryMode() == JobDirectoryMode.SMS_BYTE_IO) {
-			return true;
-		}
+//		if(((UnicoreHostType)jobExecutionContext.getApplicationContext().getHostDescription().getType()).getJobDirectoryMode() == JobDirectoryMode.SMS_BYTE_IO) {
+//			return true;
+//		}
 		return false;
 	}
 	

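Condensed, the endpoint lookup the patched handler now performs can be sketched as below. This is not code from the commit: the wrapper class and method are invented for illustration, and the AppCatalogException thrown by GFacUtils.getUnicoreJobSubmission is inferred from the try/catch in the handler above.

    import org.airavata.appcatalog.cpi.AppCatalogException;
    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gfac.core.utils.GFacUtils;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
    import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;

    public class UnicoreEndpointLookup {
        // Resolve the UNICORE BES factory URL from the experiment's preferred
        // job submission interface instead of the old UnicoreHostType array.
        public static String resolveFactoryUrl(JobExecutionContext jobExecutionContext)
                throws AppCatalogException {
            JobSubmissionInterface preferred =
                    jobExecutionContext.getPreferredJobSubmissionInterface();
            if (JobSubmissionProtocol.UNICORE.equals(preferred.getJobSubmissionProtocol())) {
                UnicoreJobSubmission unicore =
                        GFacUtils.getUnicoreJobSubmission(preferred.getJobSubmissionInterfaceId());
                return unicore.getUnicoreEndPointURL();
            }
            return null;   // caller decides how to handle non-UNICORE protocols
        }
    }
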
http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
index 7ed038a..398f05c 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
@@ -23,6 +23,7 @@ package org.apache.airavata.gfac.bes.provider.impl;
 import java.util.Calendar;
 import java.util.Map;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.bes.security.UNICORESecurityContext;
@@ -40,6 +41,9 @@ import org.apache.airavata.gfac.core.provider.AbstractProvider;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.UnicoreHostType;
@@ -101,209 +105,165 @@ public class BESProvider extends AbstractProvider implements GFacProvider,
 
 	public void execute(JobExecutionContext jobExecutionContext)
 			throws GFacProviderException, GFacException {
-		UnicoreHostType host = (UnicoreHostType) jobExecutionContext
-				.getApplicationContext().getHostDescription().getType();
-
-		String factoryUrl = host.getUnicoreBESEndPointArray()[0];
-
-		EndpointReferenceType eprt = EndpointReferenceType.Factory
-				.newInstance();
-		eprt.addNewAddress().setStringValue(factoryUrl);
-
-		// WSUtilities.addServerIdentity(eprt, serverDN);
-
-		String userDN = getUserName(jobExecutionContext);
-
-		// TODO: to be removed
-		if (userDN == null || userDN.equalsIgnoreCase("admin")) {
-			userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
-		}
-
-		StorageClient sc = null;
-
-		try {
-
-			CreateActivityDocument cad = CreateActivityDocument.Factory
-					.newInstance();
-			JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
-					.newInstance();
-
-//			String xlogin = getCNFromUserDN(userDN);
-
-			// create storage
-			StorageCreator storageCreator = new StorageCreator(secProperties,
-					factoryUrl, 5, null);
-
-			try {
-				sc = storageCreator.createStorage();
-			} catch (Exception e2) {
-				log.error("Cannot create storage..");
-				throw new GFacProviderException("Cannot create storage..", e2);
-			}
-
-			JobDefinitionType jobDefinition = jobDefDoc.addNewJobDefinition();
-			try {
-				jobDefinition = JSDLGenerator.buildJSDLInstance(
-						jobExecutionContext, sc.getUrl()).getJobDefinition();
-				cad.addNewCreateActivity().addNewActivityDocument()
-						.setJobDefinition(jobDefinition);
-				log.info("JSDL" + jobDefDoc.toString());
-			} catch (Exception e1) {
-				throw new GFacProviderException(
-						"Cannot generate JSDL instance from the JobExecutionContext.",
-						e1);
-			}
-
-			// upload files if any
-			DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
-			dt.uploadLocalFiles();
-
-			FactoryClient factory = null;
-			JobDetails jobDetails = new JobDetails();
-
-			try {
-				factory = new FactoryClient(eprt, secProperties);
-			} catch (Exception e) {
-				throw new GFacProviderException(e.getLocalizedMessage(), e);
-			}
-			CreateActivityResponseDocument response = null;
-			try {
-				log.info(String.format("Activity Submitting to %s ... \n",
-						factoryUrl));
-				jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-				response = factory.createActivity(cad);
-				log.info(String.format("Activity Submitted to %s \n", factoryUrl));
-			} catch (Exception e) {
-				throw new GFacProviderException("Cannot create activity.", e);
-			}
-			EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
-
-			log.info("Activity : " + activityEpr.getAddress().getStringValue()	+ " Submitted.");
-
-			// factory.waitWhileActivityIsDone(activityEpr, 1000);
-			jobId = WSUtilities.extractResourceID(activityEpr);
-			if (jobId == null) {
-				jobId = new Long(Calendar.getInstance().getTimeInMillis())
-						.toString();
-			}
-			log.info("JobID: " + jobId);
-			jobDetails.setJobID(activityEpr.toString());
-			jobDetails.setJobDescription(activityEpr.toString());
-
-			jobExecutionContext.setJobDetails(jobDetails);
-			try {
-			log.info(formatStatusMessage(activityEpr.getAddress()
-					.getStringValue(), factory.getActivityStatus(activityEpr)
-					.toString()));
-
-			jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
-			GFacUtils.saveJobStatus(jobExecutionContext, details,JobState.SUBMITTED);
-
-			factory.getActivityStatus(activityEpr);
-			log.info(formatStatusMessage(activityEpr.getAddress()
-					.getStringValue(), factory.getActivityStatus(activityEpr)
-					.toString()));
-
-			// TODO publish the status messages to the message bus
-			while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
-					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
-					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
-
-				ActivityStatusType activityStatus = null;
-				try {
-					activityStatus = getStatus(factory, activityEpr);
-					JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
-					String jobStatusMessage = "Status of job " + jobId + "is "
-							+ applicationJobStatus;
-					GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
-							applicationJobStatus);
-
-					jobExecutionContext.getNotifier().publish(
-							new StatusChangeEvent(jobStatusMessage));
-
-					// GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
-					// applicationJobStatus);
-				} catch (UnknownActivityIdentifierFault e) {
-					throw new GFacProviderException(e.getMessage(),
-							e.getCause());
-				}
-
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				continue;
-			}
-			}catch(Exception e) {
-				throw new GFacProviderException(e.getMessage(),
-						e.getCause());
-				
-			}
-			
-			ActivityStatusType activityStatus = null;
-			try {
-				activityStatus = getStatus(factory, activityEpr);
-				log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
-				ActivityClient activityClient;
-				activityClient = new ActivityClient(activityEpr,secProperties);
-				dt.setStorageClient(activityClient.getUspaceClient());
-			} catch (Exception e1) {
-				throw new GFacProviderException(e1.getMessage(),
-						e1.getCause());
-			}
-
-			
-
-			if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
-				String error = activityStatus.getFault().getFaultcode()
-						.getLocalPart()
-						+ "\n"
-						+ activityStatus.getFault().getFaultstring()
-						+ "\n EXITCODE: " + activityStatus.getExitCode();
-				log.info(error);
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				dt.downloadStdOuts();
-			} else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
-				JobState applicationJobStatus = JobState.CANCELED;
-				String jobStatusMessage = "Status of job " + jobId + "is "
-						+ applicationJobStatus;
-				jobExecutionContext.getNotifier().publish(
-						new StatusChangeEvent(jobStatusMessage));
-				GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
-						applicationJobStatus);
-				throw new GFacProviderException(
-						jobExecutionContext.getExperimentID() + "Job Canceled");
-			}
-
-			else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				if (activityStatus.getExitCode() == 0) {
-					dt.downloadRemoteFiles();
-				} else {
-					dt.downloadStdOuts();
-				}
-			}
-
-		} finally {
-			// destroy sms instance
-			try {
-				if (sc != null) {
-					sc.destroy();
-				}
-			} catch (Exception e) {
-				log.warn(
-						"Cannot destroy temporary SMS instance:" + sc.getUrl(),
-						e);
-			}
-		}
-
-	}
+        StorageClient sc = null;
+        try {
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
+            EndpointReferenceType eprt = EndpointReferenceType.Factory
+                    .newInstance();
+            eprt.addNewAddress().setStringValue(factoryUrl);
+            String userDN = getUserName(jobExecutionContext);
+
+            // TODO: to be removed
+            if (userDN == null || userDN.equalsIgnoreCase("admin")) {
+                userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+            }
+            CreateActivityDocument cad = CreateActivityDocument.Factory
+                    .newInstance();
+            JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
+                    .newInstance();
+
+            // create storage
+            StorageCreator storageCreator = new StorageCreator(secProperties,
+                    factoryUrl, 5, null);
+            sc = storageCreator.createStorage();
+
+            JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(
+                    jobExecutionContext, sc.getUrl()).getJobDefinition();
+            cad.addNewCreateActivity().addNewActivityDocument()
+                    .setJobDefinition(jobDefinition);
+            log.info("JSDL: " + jobDefDoc.toString());
+
+            // upload files if any
+            DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
+            dt.uploadLocalFiles();
+
+            JobDetails jobDetails = new JobDetails();
+            FactoryClient factory = new FactoryClient(eprt, secProperties);
+
+            log.info(String.format("Activity Submitting to %s ... \n",
+                    factoryUrl));
+            jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
+            CreateActivityResponseDocument response = factory.createActivity(cad);
+            log.info(String.format("Activity Submitted to %s \n", factoryUrl));
+
+            EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
+
+            log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
+
+            // factory.waitWhileActivityIsDone(activityEpr, 1000);
+            jobId = WSUtilities.extractResourceID(activityEpr);
+            if (jobId == null) {
+                jobId = String.valueOf(
+                        Calendar.getInstance().getTimeInMillis());
+            }
+            log.info("JobID: " + jobId);
+            jobDetails.setJobID(activityEpr.toString());
+            jobDetails.setJobDescription(activityEpr.toString());
+
+            jobExecutionContext.setJobDetails(jobDetails);
+            log.info(formatStatusMessage(activityEpr.getAddress()
+                    .getStringValue(), factory.getActivityStatus(activityEpr)
+                    .toString()));
+
+            jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
+            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
+
+            factory.getActivityStatus(activityEpr);
+            log.info(formatStatusMessage(activityEpr.getAddress()
+                    .getStringValue(), factory.getActivityStatus(activityEpr)
+                    .toString()));
+
+            // TODO publish the status messages to the message bus
+            while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
+                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
+                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
+
+                ActivityStatusType activityStatus = getStatus(factory, activityEpr);
+                JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
+                String jobStatusMessage = "Status of job " + jobId + " is "
+                        + applicationJobStatus;
+                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
+                        applicationJobStatus);
+
+                jobExecutionContext.getNotifier().publish(
+                        new StatusChangeEvent(jobStatusMessage));
+
+                // GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
+                // applicationJobStatus);
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                continue;
+            }
+
+            ActivityStatusType activityStatus =
+                    getStatus(factory, activityEpr);
+            log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
+            ActivityClient activityClient;
+            activityClient = new ActivityClient(activityEpr, secProperties);
+            dt.setStorageClient(activityClient.getUspaceClient());
+
+            if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
+                String error = activityStatus.getFault().getFaultcode()
+                        .getLocalPart()
+                        + "\n"
+                        + activityStatus.getFault().getFaultstring()
+                        + "\n EXITCODE: " + activityStatus.getExitCode();
+                log.info(error);
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                dt.downloadStdOuts();
+            } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
+                JobState applicationJobStatus = JobState.CANCELED;
+                String jobStatusMessage = "Status of job " + jobId + " is "
+                        + applicationJobStatus;
+                jobExecutionContext.getNotifier().publish(
+                        new StatusChangeEvent(jobStatusMessage));
+                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
+                        applicationJobStatus);
+                throw new GFacProviderException(
+                        jobExecutionContext.getExperimentID() + " Job Canceled");
+            } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                if (activityStatus.getExitCode() == 0) {
+                    dt.downloadRemoteFiles();
+                } else {
+                    dt.downloadStdOuts();
+                }
+            }
+        } catch (AppCatalogException e) {
+            log.error("Error while retrieving UNICORE job submission..");
+            throw new GFacProviderException("Error while retrieving UNICORE job submission..", e);
+        } catch (Exception e) {
+            log.error("Cannot create storage..");
+            throw new GFacProviderException("Cannot create storage..", e);
+        } finally {
+            // destroy sms instance
+            try {
+                if (sc != null) {
+                    sc.destroy();
+                }
+            } catch (Exception e) {
+                log.warn(
+                        "Cannot destroy temporary SMS instance:" + sc.getUrl(),
+                        e);
+            }
+        }
+
+    }
 
 	private JobState getApplicationJobStatus(ActivityStatusType activityStatus) {
 		if (activityStatus == null) {
@@ -368,10 +328,14 @@ public class BESProvider extends AbstractProvider implements GFacProvider,
 			// initSecurityProperties(jobExecutionContext);
 			EndpointReferenceType eprt = EndpointReferenceType.Factory
 					.parse(activityEpr);
-			UnicoreHostType host = (UnicoreHostType) jobExecutionContext
-					.getApplicationContext().getHostDescription().getType();
-
-			String factoryUrl = host.getUnicoreBESEndPointArray()[0];
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
 			EndpointReferenceType epr = EndpointReferenceType.Factory
 					.newInstance();
 			epr.addNewAddress().setStringValue(factoryUrl);
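Both execute() and the status lookup above resolve the BES factory URL with the same protocol check against the preferred job submission interface. Read as a standalone sketch (a hypothetical helper, not present in this commit; it only uses calls that appear in this patch), that shared lookup is:

    // Sketch only: returns null for non-UNICORE interfaces, which callers must
    // handle before building the EndpointReferenceType.
    private String getUnicoreFactoryUrl(JobExecutionContext jobExecutionContext)
            throws AppCatalogException {
        JobSubmissionInterface preferred = jobExecutionContext.getPreferredJobSubmissionInterface();
        if (preferred.getJobSubmissionProtocol() == JobSubmissionProtocol.UNICORE) {
            UnicoreJobSubmission unicoreJobSubmission =
                    GFacUtils.getUnicoreJobSubmission(preferred.getJobSubmissionInterfaceId());
            return unicoreJobSubmission.getUnicoreEndPointURL();
        }
        return null;
    }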

http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
index 7285c2c..855335f 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
@@ -38,7 +38,7 @@ public class UNICORESecurityContext extends X509SecurityContext {
 	 * @return an instance of the default client configuration
 	 * @throws GFacException
 	 * @throws ApplicationSettingsException 
-	 * @throws GFacProviderException
+	 * @throws GFacException, ApplicationSettingsException
 	 */
 	public DefaultClientConfiguration getDefaultConfiguration() throws GFacException, ApplicationSettingsException {
 		try{
@@ -69,7 +69,7 @@ public class UNICORESecurityContext extends X509SecurityContext {
 	 * @param caKeyPath
 	 * @param caKeyPwd
 	 * @return
-	 * @throws GFacProviderException
+	 * @throws GFacException
 	 */
 	public DefaultClientConfiguration getServerSignedConfiguration(String userID, String userDN, String caCertPath, String caKeyPath, String caKeyPwd) throws GFacException {
 		try {

http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
index d624340..ee58565 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
@@ -22,21 +22,18 @@
 package org.apache.airavata.gfac.bes.utils;
 
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
 import org.apache.airavata.schemas.gfac.ExtendedKeyValueType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.FileNameType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.UserNameType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ProcessesPerHostType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ThreadsPerProcessType;
 
-import java.io.File;
-
 
 public class ApplicationProcessor {
 	
@@ -47,40 +44,50 @@ public class ApplicationProcessor {
 			userName = "CN=zdv575, O=Ultrascan Gateway, C=DE";
 		}
 		
-		HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
-				.getApplicationContext().getApplicationDeploymentDescription()
-				.getType();
-		
-		createGenericApplication(value, appDepType);
-		
-		if (appDepType.getApplicationEnvironmentArray().length > 0) {
-			createApplicationEnvironment(value,
-					appDepType.getApplicationEnvironmentArray(), appDepType);
-		}
+		ApplicationDeploymentDescription appDep= context.getApplicationContext().getApplicationDeploymentDescription();
+        String appname = context.getApplicationContext().getApplicationInterfaceDescription().getApplicationName();
+        ApplicationParallelismType parallelism = appDep.getParallelism();
 
-		
-		if (appDepType.getExecutableLocation() != null) {
+        ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
+        appType.setApplicationName(appname);
+        JSDLUtils.getOrCreateJobIdentification(value).setJobName(appname);
+
+//		if (appDep.getSetEnvironment().size() > 0) {
+//            createApplicationEnvironment(value, appDep.getSetEnvironment(), parallelism);
+//		}
+//
+        String stdout = context.getStandardOutput();
+        String stderr = context.getStandardError();
+        if (appDep.getExecutablePath() != null) {
 			FileNameType fNameType = FileNameType.Factory.newInstance();
-			fNameType.setStringValue(appDepType.getExecutableLocation());
-			if(isParallelJob(appDepType)) {
+			fNameType.setStringValue(appDep.getExecutablePath());
+			if(parallelism.equals(ApplicationParallelismType.MPI) || parallelism.equals(ApplicationParallelismType.OPENMP_MPI)) {
 				JSDLUtils.getOrCreateSPMDApplication(value).setExecutable(fNameType);
-				JSDLUtils.getSPMDApplication(value).setSPMDVariation(getSPMDVariation(appDepType));
-				
-				if(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES)!=null){
+                if (parallelism.equals(ApplicationParallelismType.OPENMP_MPI)){
+                    JSDLUtils.getSPMDApplication(value).setSPMDVariation(SPMDVariations.OpenMPI.value());
+                }else if (parallelism.equals(ApplicationParallelismType.MPI)){
+                    JSDLUtils.getSPMDApplication(value).setSPMDVariation(SPMDVariations.MPI.value());
+                }
+
+                int totalCPUCount = context.getTaskData().getTaskScheduling().getTotalCPUCount();
+                if(totalCPUCount > 0){
 					NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
-					num.setStringValue(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES));
+                    num.setStringValue(String.valueOf(totalCPUCount));
 					JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
 				}
-							
-				if(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST)!=null){
-					ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
-					pph.setStringValue(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST));
-					JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
-				}
-				
-				if(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST)!=null){
+
+                int totalNodeCount = context.getTaskData().getTaskScheduling().getNodeCount();
+                if(totalNodeCount > 0){
+                    int ppn = totalCPUCount / totalNodeCount;
+                    ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
+                    pph.setStringValue(String.valueOf(ppn));
+                    JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
+                }
+
+                int totalThreadCount = context.getTaskData().getTaskScheduling().getNumberOfThreads();
+                if(totalThreadCount > 0){
 					ThreadsPerProcessType tpp = ThreadsPerProcessType.Factory.newInstance();
-					tpp.setStringValue(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST));
+					tpp.setStringValue(String.valueOf(totalThreadCount));
 					JSDLUtils.getSPMDApplication(value).setThreadsPerProcess(tpp);
 					
 				}
@@ -90,6 +97,18 @@ public class ApplicationProcessor {
 					userNameType.setStringValue(userName);
 					JSDLUtils.getSPMDApplication(value).setUserName(userNameType);
 				}
+                if (stdout != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stdout);
+                    JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
+                }
+                if (stderr != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stderr);
+                    JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
+                }
+
+
 			}
 			else {
 				JSDLUtils.getOrCreatePOSIXApplication(value).setExecutable(fNameType);
@@ -98,17 +117,18 @@ public class ApplicationProcessor {
 					userNameType.setStringValue(userName);
 					JSDLUtils.getOrCreatePOSIXApplication(value).setUserName(userNameType);
 				}
+                if (stdout != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stdout);
+                    JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
+                }
+                if (stderr != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stderr);
+                    JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
+                }
 			}
 		}
-		
-
-		String stdout = (appDepType.getStandardOutput() != null) ? new File(appDepType.getStandardOutput()).getName(): "stdout"; 
-		ApplicationProcessor.setApplicationStdOut(value, appDepType, stdout);
-		
-	
-		String stderr = (appDepType.getStandardError() != null) ? new File(appDepType.getStandardError()).getName() : "stderr"; 
-		ApplicationProcessor.setApplicationStdErr(value, appDepType, stderr);
-	
 	}
 	
 	public static String getUserNameFromContext(JobExecutionContext jobContext) {
@@ -117,79 +137,7 @@ public class ApplicationProcessor {
 		//FIXME: Discuss to get user and change this
 		return "admin";
 	}
-	public static boolean isParallelJob(HpcApplicationDeploymentType appDepType) {
-		
-		boolean isParallel = false;
-		
-		if (appDepType.getJobType() != null) {
-			// TODO set data output directory
-			int status = appDepType.getJobType().intValue();
-
-			switch (status) {
-			// TODO: this check should be done outside this class
-			case JobTypeType.INT_MPI:
-			case JobTypeType.INT_OPEN_MP:
-				isParallel = true;
-				break;
-				
-			case JobTypeType.INT_SERIAL:
-			case JobTypeType.INT_SINGLE:
-				isParallel = false;
-				break;
 
-			default:
-				isParallel = false;
-				break;
-			}
-		}
-		return isParallel;
-	}
-
-	
-	public static void createApplicationEnvironment(JobDefinitionType value, NameValuePairType[] nameValuePairs, HpcApplicationDeploymentType appDepType) {
-		
-		if(isParallelJob(appDepType)) {
-			for (NameValuePairType nv : nameValuePairs) {
-				EnvironmentType envType = JSDLUtils.getOrCreateSPMDApplication(value).addNewEnvironment();
-				envType.setName(nv.getName());
-				envType.setStringValue(nv.getValue());
-			}
-		}
-		else {
-			for (NameValuePairType nv : nameValuePairs) {
-				EnvironmentType envType = JSDLUtils.getOrCreatePOSIXApplication(value).addNewEnvironment();
-				envType.setName(nv.getName());
-				envType.setStringValue(nv.getValue());
-			}
-		}
-
-	}
-	
-	
-	public static String getSPMDVariation (HpcApplicationDeploymentType appDepType) {
-		
-		String variation = null;
-		
-		if (appDepType.getJobType() != null) {
-			// TODO set data output directory
-			int status = appDepType.getJobType().intValue();
-
-			switch (status) {
-			// TODO: this check should be done outside this class
-			case JobTypeType.INT_MPI:
-				variation = SPMDVariations.MPI.value();				
-				break;
-				
-			case JobTypeType.INT_OPEN_MP:
-				variation = SPMDVariations.OpenMPI.value();
-				break;
-				
-			}
-		}
-		return variation;
-	}
-	
-	
 	public static void addApplicationArgument(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stringPrm) {
 		if(isParallelJob(appDepType)) 		
 			JSDLUtils.getOrCreateSPMDApplication(value)
@@ -200,24 +148,6 @@ public class ApplicationProcessor {
 
 	}
 	
-	public static void setApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
-		FileNameType fName = FileNameType.Factory.newInstance();
-		fName.setStringValue(stderr);
-		if (isParallelJob(appDepType)) 
-			JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
-		else 
-			JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
-	}
-	
-	public static void setApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
-		FileNameType fName = FileNameType.Factory.newInstance();
-		fName.setStringValue(stderr);
-		if (isParallelJob(appDepType)) 
-			JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
-		else 
-			JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
-	}
-	
 	public static String getApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
 		if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getOutput().getStringValue();
 		else return JSDLUtils.getOrCreatePOSIXApplication(value).getOutput().getStringValue();
@@ -228,18 +158,14 @@ public class ApplicationProcessor {
 		else return JSDLUtils.getOrCreatePOSIXApplication(value).getError().getStringValue();
 	}
 	
-	public static void createGenericApplication(JobDefinitionType value, HpcApplicationDeploymentType appDepType) {
-		if (appDepType.getApplicationName() != null) {
-			ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
-			String appName = appDepType.getApplicationName()
-					.getStringValue();
-			appType.setApplicationName(appName);
-			JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
-		}
-	}
-	
-	
-	public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
+	public static void createGenericApplication(JobDefinitionType value, String appName) {
+        ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
+        appType.setApplicationName(appName);
+        JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
+    }
+
+
+    public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
 		ExtendedKeyValueType[] extended = appDepType.getKeyValuePairsArray();
 		for(ExtendedKeyValueType e: extended) {
 			if(e.getName().equalsIgnoreCase(name)) {

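The task scheduling values consumed earlier in this file (TotalCPUCount, NodeCount, NumberOfThreads) map onto the JSDL SPMD section as NumberOfProcesses = TotalCPUCount, ProcessesPerHost = TotalCPUCount / NodeCount and ThreadsPerProcess = NumberOfThreads; for example, TotalCPUCount=32 with NodeCount=4 and NumberOfThreads=2 yields 32 processes, 8 per host and 2 threads per process. A small sketch of that arithmetic (a hypothetical helper, not in the patch):

    // Sketch only: the processes-per-host arithmetic used in ApplicationProcessor,
    // with a guard for an unset node count (the patch checks nodeCount > 0 before use).
    static int processesPerHost(int totalCPUCount, int nodeCount) {
        return nodeCount > 0 ? totalCPUCount / nodeCount : totalCPUCount;
    }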
http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index 695c943..1cb1250 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -39,7 +39,9 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
+import org.apache.airavata.model.appcatalog.computeresource.GlobusJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
@@ -1257,21 +1259,34 @@ public class GFacUtils {
             AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
             return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
         }catch (Exception e){
-            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            String errorMsg = "Error while retrieving UNICORE job submission with submission id : " + submissionId;
             log.error(errorMsg, e);
             throw new AppCatalogException(errorMsg, e);
         }
     }
 
-    public static UnicoreJobSubmission getJobSubmission (String submissionId) throws AppCatalogException{
+    public static GlobusJobSubmission getGlobusJobSubmission (String submissionId) throws AppCatalogException{
+        return null;
+//        try {
+//            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+//            return appCatalog.getComputeResource().getGlobus(submissionId);
+//        }catch (Exception e){
+//            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+//            log.error(errorMsg, e);
+//            throw new AppCatalogException(errorMsg, e);
+//        }
+    }
+
+    public static SSHJobSubmission getSSHJobSubmission (String submissionId) throws AppCatalogException{
         try {
             AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
-            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+            return appCatalog.getComputeResource().getSSHJobSubmission(submissionId);
         }catch (Exception e){
-            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            String errorMsg = "Error while retrieving SSH job submission with submission id : " + submissionId;
             log.error(errorMsg, e);
             throw new AppCatalogException(errorMsg, e);
         }
     }
 
+
 }
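The new lookup helpers are keyed by the job submission interface id, so a provider is expected to dispatch on the preferred protocol before calling them. A usage sketch (assumptions: this fragment sits inside a method that declares throws AppCatalogException, the preferred interface is already on the JobExecutionContext, and only the protocols with helpers in this patch are handled):

    // Sketch only: dispatch on the preferred protocol to the matching app-catalog lookup.
    JobSubmissionInterface preferred = jobExecutionContext.getPreferredJobSubmissionInterface();
    String interfaceId = preferred.getJobSubmissionInterfaceId();
    switch (preferred.getJobSubmissionProtocol()) {
        case UNICORE:
            UnicoreJobSubmission unicore = GFacUtils.getUnicoreJobSubmission(interfaceId);
            // unicore.getUnicoreEndPointURL() feeds the BES factory EPR
            break;
        case SSH:
            SSHJobSubmission ssh = GFacUtils.getSSHJobSubmission(interfaceId);
            // ssh.getResourceJobManager() configures the SSH/GSISSH provider
            break;
        default:
            throw new IllegalArgumentException("No app catalog lookup implemented for "
                    + preferred.getJobSubmissionProtocol());
    }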

http://git-wip-us.apache.org/repos/asf/airavata/blob/04f09e7d/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
index 940fff3..5c5af53 100644
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
@@ -38,6 +38,7 @@ import org.apache.airavata.gfac.core.provider.utils.ProviderUtils;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
 import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.Ec2ApplicationDeploymentType;
@@ -90,7 +91,7 @@ public class EC2Provider extends AbstractProvider {
 
     public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException{
         if (jobExecutionContext != null) {
-    		jobId="EC2_"+jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress()+"_"+Calendar.getInstance().getTimeInMillis();
+    		jobId="EC2_"+jobExecutionContext.getHostName()+"_"+Calendar.getInstance().getTimeInMillis();
             if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
                     instanceof AmazonSecurityContext) {
                 this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
@@ -156,10 +157,9 @@ public class EC2Provider extends AbstractProvider {
         try
         {
             String outParamName;
-            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
-                    getServiceDescription().getType().getOutputParametersArray();
-            if(outputParametersArray != null) {
-                outParamName = outputParametersArray[0].getParameterName();
+            List<OutputDataObjectType> outputs = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription().getApplicationOutputs();
+            if(outputs != null && !outputs.isEmpty()) {
+                outParamName = outputs.get(0).getName();
             } else {
                 throw new GFacProviderException("Output parameter name is not set. Therefore, not being able " +
                         "to filter the job result from standard out ");
@@ -217,11 +217,10 @@ public class EC2Provider extends AbstractProvider {
             executionResult = executionResult.replace("\r","").replace("\n","");
             log.info("Result of the job : " + executionResult);
 
-            for(OutputParameterType outparamType : outputParametersArray){
+            for(OutputDataObjectType outparamType : outputs){
                 /* Assuming that there is just a single result. If you want to add more results, update the necessary
                    logic below */
-                String paramName = outparamType.getParameterName();
+                String paramName = outparamType.getName();
                 ActualParameter outParam = new ActualParameter();
                 outParam.getType().changeType(StringParameterType.type);
                 ((StringParameterType) outParam.getType()).setValue(executionResult);
                 jobExecutionContext.getOutMessageContext().addParameter(paramName, outParam);


[11/50] [abbrv] airavata git commit: Changing gfac-core to use app catalog

Posted by ch...@apache.org.
Changing gfac-core to use app catalog


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/a1e0ec81
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/a1e0ec81
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/a1e0ec81

Branch: refs/heads/master
Commit: a1e0ec813969000d755e47ee77c2be5fbb401f2b
Parents: 8abe8dc
Author: chathuriw <ka...@gmail.com>
Authored: Thu Oct 30 10:11:19 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:14 2014 -0500

----------------------------------------------------------------------
 .../org/apache/airavata/gfac/Scheduler.java     | 35 ++++-----
 .../gfac/core/context/JobExecutionContext.java  | 21 ++++++
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 47 ++++++++++++
 .../core/handler/AppDescriptorCheckHandler.java | 17 -----
 .../airavata/gfac/core/monitor/MonitorID.java   |  1 +
 .../apache/airavata/job/GFacConfigXmlTest.java  | 78 ++++++++++++++++----
 6 files changed, 148 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 9b70fae..8f5847f 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -21,30 +21,26 @@
 
 package org.apache.airavata.gfac;
 
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.xpath.XPathExpressionException;
-
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderConfig;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.xml.sax.SAXException;
 
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.xpath.XPathExpressionException;
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.util.List;
+
 
 /**
  * Scheduler decides the execution order of handlers based on application description. In addition
@@ -76,7 +72,6 @@ public class Scheduler {
      * @return GFacProvider instance.
      */
     private static GFacProvider getProvider(JobExecutionContext jobExecutionContext) throws GFacException {
-        ComputeResourceDescription hostDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
         String applicationName = jobExecutionContext.getServiceName();
 
         URL resource = Scheduler.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
@@ -113,8 +108,8 @@ public class Scheduler {
             // We give higher preference to applications specific provider if configured
             if (provider == null) {
 
-                jobExecutionContext.getApplicationContext().getComputeResourcePreference().getPreferredJobSubmissionProtocol()
-                String hostClass = hostDescription.getType().getClass().getName();
+                List<JobSubmissionInterface> jobSubmissionInterfaces = jobExecutionContext.getApplicationContext().getComputeResourceDescription().getJobSubmissionInterfaces();
+                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
                 providerClassName = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
                 Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);
                 provider = aClass1.newInstance();
@@ -144,9 +139,7 @@ public class Scheduler {
         return provider;
     }
     public static ExecutionMode getExecutionMode(JobExecutionContext jobExecutionContext)throws GFacException{
-       HostDescription hostDescription = jobExecutionContext.getApplicationContext().getHostDescription();
-        String applicationName = jobExecutionContext.getServiceName();
-
+        String applicationName = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription().getApplicationName();
         URL resource = Scheduler.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
         DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
         DocumentBuilder docBuilder = null;
@@ -169,7 +162,7 @@ public class Scheduler {
             // This should be have a single element only.
 
             if (executionMode == null || "".equals(executionMode)) {
-                String hostClass = hostDescription.getType().getClass().getName();
+                String hostClass = jobExecutionContext.getPrefferedJobSubmissionProtocal();
                 executionMode = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
             }
         } catch (XPathExpressionException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index da716c5..2b2255f 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -52,12 +52,14 @@ public class JobExecutionContext extends AbstractContext implements Serializable
 
     private GFacNotifier notifier;
 
+    //FIXME : not needed for gfac
     private Experiment experiment;
 
     private TaskDetails taskData;
 
     private JobDetails jobDetails;
 
+    // FIXME : not needed for gfac
     private WorkflowNodeDetails workflowNodeDetails;
 
     private GFac gfac;
@@ -72,6 +74,9 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     private String outputDir;
     private String standaredOutput;
     private String standaredError;
+    private String prefferedJobSubmissionProtocal;
+    private String prefferedDataMovementProtocal;
+
 
 //    private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -364,4 +369,20 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setStandaredError(String standaredError) {
         this.standaredError = standaredError;
     }
+
+    public String getPrefferedJobSubmissionProtocal() {
+        return prefferedJobSubmissionProtocal;
+    }
+
+    public void setPrefferedJobSubmissionProtocal(String prefferedJobSubmissionProtocal) {
+        this.prefferedJobSubmissionProtocal = prefferedJobSubmissionProtocal;
+    }
+
+    public String getPrefferedDataMovementProtocal() {
+        return prefferedDataMovementProtocal;
+    }
+
+    public void setPrefferedDataMovementProtocal(String prefferedDataMovementProtocal) {
+        this.prefferedDataMovementProtocal = prefferedDataMovementProtocal;
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 16c49e6..fd43c65 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -52,6 +52,7 @@ import org.apache.airavata.messaging.core.PublisherFactory;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -298,6 +299,52 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setGfac(this);
         jobExecutionContext.setZk(zk);
         jobExecutionContext.setCredentialStoreToken(AiravataZKUtils.getExpTokenId(zk, experimentID, taskID));
+        if (gatewayResourcePreferences != null ) {
+            if (gatewayResourcePreferences.getScratchLocation() == null) {
+                gatewayResourcePreferences.setScratchLocation("/tmp");
+            }
+
+            /**
+             * Working dir
+             */
+            String workingDir = gatewayResourcePreferences.getScratchLocation() + File.separator + jobExecutionContext.getExperimentID();
+            jobExecutionContext.setWorkingDir(workingDir);
+
+            /*
+            * Input and Output Directory
+            */
+            jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
+            jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
+
+            /*
+            * Stdout and Stderr for Shell
+            */
+            jobExecutionContext.setStandaredOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
+            jobExecutionContext.setStandaredError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+        }
+
+        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
+        String preferredJobSubmissionProtocol = gatewayResourcePreferences.getPreferredJobSubmissionProtocol();
+        String hostClass;
+        if (preferredJobSubmissionProtocol != null){
+            hostClass = preferredJobSubmissionProtocol;
+        }else {
+            if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
+                int lowestPriority = jobSubmissionInterfaces.get(0).getPriorityOrder();
+                String selectedHost = jobSubmissionInterfaces.get(0).getJobSubmissionProtocol().toString();
+                for (int i = 0; i < jobSubmissionInterfaces.size() - 1; i++){
+                    if (jobSubmissionInterfaces.get(i+1).getPriorityOrder() < lowestPriority ){
+                        lowestPriority = jobSubmissionInterfaces.get(i+1).getPriorityOrder();
+                        selectedHost = jobSubmissionInterfaces.get(i+1).getJobSubmissionProtocol().toString();
+                    }
+                }
+                hostClass = selectedHost;
+            }else {
+                throw new GFacException("Compute resource should have at least one job submission interface defined...");
+            }
+        }
+        jobExecutionContext.setPrefferedJobSubmissionProtocal(hostClass);
+
         return jobExecutionContext;
     }
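When no preferred protocol is configured for the gateway, the block above picks the job submission interface with the lowest priorityOrder. Pulled out as a standalone sketch (a hypothetical helper, not part of the commit; java.util.List and the computeresource types are the ones already imported above), the selection reads:

    // Sketch only: start from the first interface so a single-interface resource,
    // or one where the first entry already has the lowest priority, still resolves.
    private static String selectJobSubmissionProtocol(List<JobSubmissionInterface> interfaces)
            throws GFacException {
        if (interfaces == null || interfaces.isEmpty()) {
            throw new GFacException("Compute resource should have at least one job submission interface defined...");
        }
        JobSubmissionInterface selected = interfaces.get(0);
        for (JobSubmissionInterface candidate : interfaces) {
            if (candidate.getPriorityOrder() < selected.getPriorityOrder()) {
                selected = candidate;
            }
        }
        return selected.getJobSubmissionProtocol().toString();
    }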
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
index 676a15a..4627bf5 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
@@ -20,16 +20,13 @@
 */
 package org.apache.airavata.gfac.core.handler;
 
-import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
 import java.util.Properties;
 
 public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
@@ -43,33 +40,19 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
             logger.info("Error saving plugin status to ZK");
         }
         StringBuffer data = new StringBuffer();
-        ApplicationInterfaceDescription appInterface = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription();
         ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
 
-        if (computeResourcePreference.getScratchLocation() == null) {
-            computeResourcePreference.setScratchLocation("/tmp");
-        }
-        /*
-        * Working dir
-        */
-
-        String workingDir = computeResourcePreference.getScratchLocation() + File.separator+ jobExecutionContext.getExperimentID();
-        jobExecutionContext.setWorkingDir(workingDir);
         data.append(computeResourcePreference.getScratchLocation());
         data.append(",").append(jobExecutionContext.getWorkingDir());
 
         /*
         * Input and Output Directory
         */
-        jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME );
-        jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
         data.append(",").append(jobExecutionContext.getInputDir()).append(",").append(jobExecutionContext.getOutputDir());
 
         /*
         * Stdout and Stderr for Shell
         */
-        jobExecutionContext.setStandaredOutput(workingDir + File.separator + appInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
-        jobExecutionContext.setStandaredError(workingDir + File.separator + appInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
         data.append(",").append(jobExecutionContext.getStandaredOutput()).append(",").append(jobExecutionContext.getStandaredError());
 
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
index fa4ecd2..6ea1839 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
@@ -24,6 +24,7 @@ import org.apache.airavata.common.logger.AiravataLogger;
 import org.apache.airavata.common.logger.AiravataLoggerFactory;
 import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.workspace.experiment.JobState;
 
 import java.sql.Timestamp;

http://git-wip-us.apache.org/repos/asf/airavata/blob/a1e0ec81/modules/gfac/gfac-core/src/test/java/org/apache/airavata/job/GFacConfigXmlTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/job/GFacConfigXmlTest.java b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/job/GFacConfigXmlTest.java
index e32bd9b..7e6bc0d 100644
--- a/modules/gfac/gfac-core/src/test/java/org/apache/airavata/job/GFacConfigXmlTest.java
+++ b/modules/gfac/gfac-core/src/test/java/org/apache/airavata/job/GFacConfigXmlTest.java
@@ -21,6 +21,9 @@
 package org.apache.airavata.job;
 
 import junit.framework.Assert;
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.ExecutionMode;
 import org.apache.airavata.gfac.GFacConfiguration;
@@ -29,6 +32,7 @@ import org.apache.airavata.gfac.Scheduler;
 import org.apache.airavata.gfac.core.context.ApplicationContext;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -53,12 +57,34 @@ public class GFacConfigXmlTest {
         try {
             JobExecutionContext jec = new JobExecutionContext(GFacConfiguration.create(gfac.getGfacConfigFile(), null), "testService");
             ApplicationContext applicationContext = new ApplicationContext();
-            HostDescription host = new HostDescription(GsisshHostType.type);
-            host.getType().setHostAddress("trestles.sdsc.edu");
-            host.getType().setHostName("trestles");
-            ((GsisshHostType) host.getType()).setPort(22);
-            ((GsisshHostType) host.getType()).setInstalledPath("/opt/torque/bin/");
-            applicationContext.setHostDescription(host);
+            ComputeResourceDescription computeResourceDescription = new ComputeResourceDescription();
+            computeResourceDescription.setHostName("trestles.sdsc.xsede.org");
+            computeResourceDescription.setResourceDescription("SDSC Trestles Cluster");
+
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+
+            ResourceJobManager resourceJobManager = new ResourceJobManager();
+            resourceJobManager.setResourceJobManagerType(ResourceJobManagerType.PBS);
+            resourceJobManager.setPushMonitoringEndpoint("push");
+            resourceJobManager.setJobManagerBinPath("/opt/torque/bin/");
+
+            SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
+            sshJobSubmission.setResourceJobManager(resourceJobManager);
+            sshJobSubmission.setSecurityProtocol(SecurityProtocol.GSI);
+            sshJobSubmission.setSshPort(22);
+            sshJobSubmission.setResourceJobManager(resourceJobManager);
+
+            String jobSubmissionId = appCatalog.getComputeResource().addSSHJobSubmission(sshJobSubmission);
+
+            JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
+            submissionInterface.setJobSubmissionInterfaceId(jobSubmissionId);
+            submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
+            submissionInterface.setPriorityOrder(0);
+
+            computeResourceDescription.addToJobSubmissionInterfaces(submissionInterface);
+
+            appCatalog.getComputeResource().addComputeResource(computeResourceDescription);
+            applicationContext.setComputeResourceDescription(computeResourceDescription);
             jec.setApplicationContext(applicationContext);
             Scheduler.schedule(jec);
             Assert.assertEquals(ExecutionMode.ASYNCHRONOUS, jec.getGFacConfiguration().getExecutionMode());
@@ -73,6 +99,8 @@ public class GFacConfigXmlTest {
             e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
         } catch (GFacException e) {
             e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+        } catch (AppCatalogException e) {
+            e.printStackTrace();
         }
     }
     @Test
@@ -82,12 +110,34 @@ public class GFacConfigXmlTest {
             try {
                 JobExecutionContext jec = new JobExecutionContext(GFacConfiguration.create(gfac.getGfacConfigFile(), null), "UltraScan");
                 ApplicationContext applicationContext = new ApplicationContext();
-                HostDescription host = new HostDescription(GsisshHostType.type);
-                host.getType().setHostAddress("trestles.sdsc.edu");
-                host.getType().setHostName("trestles");
-                ((GsisshHostType) host.getType()).setPort(22);
-                ((GsisshHostType) host.getType()).setInstalledPath("/opt/torque/bin/");
-                applicationContext.setHostDescription(host);
+                ComputeResourceDescription computeResourceDescription = new ComputeResourceDescription();
+                computeResourceDescription.setHostName("trestles.sdsc.xsede.org");
+                computeResourceDescription.setResourceDescription("SDSC Trestles Cluster");
+
+                AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+
+                ResourceJobManager resourceJobManager = new ResourceJobManager();
+                resourceJobManager.setResourceJobManagerType(ResourceJobManagerType.PBS);
+                resourceJobManager.setPushMonitoringEndpoint("push");
+                resourceJobManager.setJobManagerBinPath("/opt/torque/bin/");
+
+                SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
+                sshJobSubmission.setResourceJobManager(resourceJobManager);
+                sshJobSubmission.setSecurityProtocol(SecurityProtocol.GSI);
+                sshJobSubmission.setSshPort(22);
+                sshJobSubmission.setResourceJobManager(resourceJobManager);
+
+                String jobSubmissionId = appCatalog.getComputeResource().addSSHJobSubmission(sshJobSubmission);
+
+                JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
+                submissionInterface.setJobSubmissionInterfaceId(jobSubmissionId);
+                submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
+                submissionInterface.setPriorityOrder(0);
+
+                computeResourceDescription.addToJobSubmissionInterfaces(submissionInterface);
+
+                appCatalog.getComputeResource().addComputeResource(computeResourceDescription);
+                applicationContext.setComputeResourceDescription(computeResourceDescription);
                 jec.setApplicationContext(applicationContext);
                 Scheduler.schedule(jec);
                 Assert.assertEquals(3, jec.getGFacConfiguration().getInHandlers().size());
@@ -106,8 +156,10 @@ public class GFacConfigXmlTest {
                 e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
             } catch (GFacException e) {
                 e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+            } catch (AppCatalogException e) {
+                e.printStackTrace();
             }
-        }
+    }
 
 
 }
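
Both tests above now hand-build the same Trestles registration. As a reading aid, here is a hypothetical helper that factors that duplicated setup out of the two test methods; the class and method names are invented, but the app-catalog calls are exactly the ones used in the diff:

    import org.airavata.appcatalog.cpi.AppCatalog;
    import org.airavata.appcatalog.cpi.AppCatalogException;
    import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
    import org.apache.airavata.model.appcatalog.computeresource.*;

    // Hypothetical test fixture; not part of the commit.
    public class TrestlesTestFixture {

        /**
         * Registers a Trestles-like compute resource with a GSI-over-SSH job submission
         * interface and returns the description, mirroring the setup used in both tests.
         */
        public static ComputeResourceDescription registerTrestles() throws AppCatalogException {
            ComputeResourceDescription computeResource = new ComputeResourceDescription();
            computeResource.setHostName("trestles.sdsc.xsede.org");
            computeResource.setResourceDescription("SDSC Trestles Cluster");

            ResourceJobManager resourceJobManager = new ResourceJobManager();
            resourceJobManager.setResourceJobManagerType(ResourceJobManagerType.PBS);
            resourceJobManager.setPushMonitoringEndpoint("push");
            resourceJobManager.setJobManagerBinPath("/opt/torque/bin/");

            SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
            sshJobSubmission.setResourceJobManager(resourceJobManager);
            sshJobSubmission.setSecurityProtocol(SecurityProtocol.GSI);
            sshJobSubmission.setSshPort(22);

            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
            String jobSubmissionId = appCatalog.getComputeResource().addSSHJobSubmission(sshJobSubmission);

            JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
            submissionInterface.setJobSubmissionInterfaceId(jobSubmissionId);
            submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
            submissionInterface.setPriorityOrder(0);
            computeResource.addToJobSubmissionInterfaces(submissionInterface);

            appCatalog.getComputeResource().addComputeResource(computeResource);
            return computeResource;
        }
    }

With such a helper, each test body would reduce to applicationContext.setComputeResourceDescription(TrestlesTestFixture.registerTrestles()); plus its existing assertions.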


[47/50] [abbrv] airavata git commit: fixing experiment output not set properly

Posted by ch...@apache.org.
fixing experiment output not set properly


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/0b867da4
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/0b867da4
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/0b867da4

Branch: refs/heads/master
Commit: 0b867da49363dbba47dceaff646d086f09de6c1c
Parents: 225b674
Author: chathuriw <ka...@gmail.com>
Authored: Fri Nov 7 15:30:31 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Fri Nov 7 15:30:31 2014 -0500

----------------------------------------------------------------------
 .../airavata/client/samples/CreateLaunchExperiment.java | 12 ++++++------
 .../apache/airavata/gfac/core/cpi/BetterGfacImpl.java   |  4 ++--
 2 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
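
The change has two halves, condensed below from the diff that follows: BetterGfacImpl now populates the GFac message contexts from the experiment-level inputs and outputs (rather than the task's application inputs/outputs), and the client sample registers its output under the name Echoed_Output. The link between the two, that the experiment output name should match what the registered application interface produces, is an inference, not stated in the commit.

    // Client side (CreateLaunchExperiment): declare the expected experiment output.
    List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
    OutputDataObjectType output = new OutputDataObjectType();
    output.setName("Echoed_Output");
    output.setType(DataType.STRING);
    output.setValue("");
    exOut.add(output);

    // Server side (BetterGfacImpl): the experiment-level outputs now feed the out message context.
    List<OutputDataObjectType> outputData = experiment.getExperimentOutputs();
    jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOuputParamMap(outputData)));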


http://git-wip-us.apache.org/repos/asf/airavata/blob/0b867da4/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index d437c4a..63fe5ad 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -164,7 +164,7 @@ public class CreateLaunchExperiment {
 
             List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
             OutputDataObjectType output = new OutputDataObjectType();
-            output.setName("echo_output");
+            output.setName("Echoed_Output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -216,7 +216,7 @@ public class CreateLaunchExperiment {
 
             List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
             OutputDataObjectType output = new OutputDataObjectType();
-            output.setName("echo_output");
+            output.setName("Echoed_Output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -492,12 +492,12 @@ public class CreateLaunchExperiment {
 //            OutputDataObjectType input = new OutputDataObjectType();
 //            input.setName("echo_input");
 //            input.setType(DataType.STRING);
-//            input.setValue("echo_output=Hello World");
+//            input.setValue("Echoed_Output=Hello World");
 //            exInputs.add(input);
 //
 //            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
 //            OutputDataObjectType output = new OutputDataObjectType();
-//            output.setName("echo_output");
+//            output.setName("Echoed_Output");
 //            output.setType(DataType.STRING);
 //            output.setValue("");
 //            exOut.add(output);
@@ -592,12 +592,12 @@ public class CreateLaunchExperiment {
 //            OutputDataObjectType input = new OutputDataObjectType();
 //            input.setName("echo_input");
 //            input.setType(DataType.STRING);
-//            input.setValue("echo_output=Hello World");
+//            input.setValue("Echoed_Output=Hello World");
 //            exInputs.add(input);
 //
 //            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
 //            OutputDataObjectType output = new OutputDataObjectType();
-//            output.setName("echo_output");
+//            output.setName("Echoed_Output");
 //            output.setType(DataType.STRING);
 //            output.setValue("");
 //            exOut.add(output);

http://git-wip-us.apache.org/repos/asf/airavata/blob/0b867da4/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index cbf32f7..814efb3 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -297,10 +297,10 @@ public class BetterGfacImpl implements GFac,Watcher {
         applicationContext.setComputeResourcePreference(gatewayResourcePreferences);
         jobExecutionContext.setApplicationContext(applicationContext);
 
-        List<InputDataObjectType> experimentInputs = taskData.getApplicationInputs();
+        List<InputDataObjectType> experimentInputs = experiment.getExperimentInputs();
         jobExecutionContext.setInMessageContext(new MessageContext(GFacUtils.getInputParamMap(experimentInputs)));
 
-        List<OutputDataObjectType> outputData = taskData.getApplicationOutputs();
+        List<OutputDataObjectType> outputData = experiment.getExperimentOutputs();
         jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOuputParamMap(outputData)));
 
         jobExecutionContext.setProperty(Constants.PROP_TOPIC, experimentID);


[24/50] [abbrv] airavata git commit: Merge remote-tracking branch 'origin/gfac_appcatalog_int' into gfac_appcatalog_int

Posted by ch...@apache.org.
Merge remote-tracking branch 'origin/gfac_appcatalog_int' into gfac_appcatalog_int

Conflicts:
	modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
	modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
	modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/7b8d9844
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/7b8d9844
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/7b8d9844

Branch: refs/heads/master
Commit: 7b8d9844f72c81e2d16fb961a60876e9dbeea82d
Parents: fa666b3 e9ee22b
Author: Chathuri Wimalasena <ka...@gmail.com>
Authored: Wed Nov 5 13:27:45 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 13:27:45 2014 -0500

----------------------------------------------------------------------
 .../airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java     | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/7b8d9844/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------


[23/50] [abbrv] airavata git commit: merging

Posted by ch...@apache.org.
merging


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/fa666b30
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/fa666b30
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/fa666b30

Branch: refs/heads/master
Commit: fa666b30ce5c3408b43cb5616c99d4d3d17a3b51
Parents: d856d24
Author: Chathuri Wimalasena <ka...@gmail.com>
Authored: Wed Nov 5 13:19:50 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 13:19:50 2014 -0500

----------------------------------------------------------------------
 .../airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java   | 5 -----
 .../airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java     | 4 ----
 2 files changed, 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/fa666b30/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 122d1e2..331663f 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@ -35,16 +35,12 @@ import org.apache.airavata.gfac.monitor.UserMonitorData;
 import org.apache.airavata.gfac.monitor.core.PullMonitor;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
 import org.apache.airavata.gfac.monitor.impl.push.amqp.SimpleJobFinishConsumer;
-import org.apache.airavata.gfac.monitor.util.CommonUtils;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.zookeeper.ZooKeeper;
 
 import java.sql.Timestamp;
 import java.util.*;
@@ -239,7 +235,6 @@ public class HPCPullMonitor extends PullMonitor {
                                 !JobState.COMPLETE.equals(iMonitorID.getStatus())) {
                             iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is NOT a simple setter we have a logic
                         }else if(JobState.COMPLETE.equals(iMonitorID.getStatus())){
-                            completedJobs.put(iMonitorID.getJobName(), iMonitorID);
                             logger.debugId(iMonitorID.getJobID(), "Moved job {} to completed jobs map, experiment {}, " +
                                     "task {}", iMonitorID.getJobID(), iMonitorID.getExperimentID(), iMonitorID.getTaskID());
                             iterator.remove();

http://git-wip-us.apache.org/repos/asf/airavata/blob/fa666b30/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index f508e23..f34b82a 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -110,10 +110,6 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                     this.passPhrase);
         }
         try {
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                    .getApplicationDeploymentDescription().getType();
-            String standardError = app.getStandardError();
-            String standardOutput = app.getStandardOutput();
             if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
                 try {
                     GFACSSHUtils.addSecurityContext(jobExecutionContext);


[19/50] [abbrv] airavata git commit: Integrated appCatalog for ssh and gsi modules, commented out old test classes, need to fix this

Posted by ch...@apache.org.
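
The GFACSSHUtils diff below rewrites addSecurityContext to resolve job submission details from the app catalog instead of the old XML host descriptions. A condensed, hypothetical sketch of the GSI-over-SSH branch (the method name is invented; imports, error handling and the keyed Cluster-reuse cache from the diff are omitted, so this is a reading aid rather than a drop-in replacement):

    static void addGsiSshSecurityContext(JobExecutionContext jobExecutionContext) throws Exception {
        JobSubmissionProtocol protocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
        JobSubmissionInterface submissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
        if (protocol != JobSubmissionProtocol.SSH) {
            return; // GLOBUS/UNICORE are handled elsewhere
        }

        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
        SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource()
                .getSSHJobSubmission(submissionInterface.getJobSubmissionInterfaceId());
        if (sshJobSubmission.getSecurityProtocol() != SecurityProtocol.GSI) {
            return; // only the GSI-over-SSH case is sketched here
        }

        // Credential-store token -> SSH credential for the gateway user.
        RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
        requestData.setTokenId(jobExecutionContext.getCredentialStoreToken());
        TokenizedSSHAuthInfo authInfo = new TokenizedSSHAuthInfo(requestData);
        SSHCredential credentials = authInfo.getCredentials();

        ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
        serverInfo.setUserName(credentials.getPortalUserName());

        // As in the diff, the deployment's executable path is used where the old code
        // used the HPC application descriptor's installed parent path.
        String installedParentPath = jobExecutionContext.getApplicationContext()
                .getApplicationDeploymentDescription().getExecutablePath();
        Cluster pbsCluster = new PBSCluster(serverInfo, authInfo,
                CommonUtils.getPBSJobManager(installedParentPath));

        SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
        sshSecurityContext.setPbsCluster(pbsCluster);
        jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT, sshSecurityContext);
    }

In the real method the PBSCluster construction sits inside a synchronized, keyed cache (portal user + host + port) so connections are reused across jobs and recreated when a probe via listDirectory("~/") fails.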
http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index ad2731a..f726024 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@ -20,11 +20,13 @@
 */
 package org.apache.airavata.gfac.ssh.util;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.StringUtil;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.credential.store.credential.impl.ssh.SSHCredential;
 import org.apache.airavata.gfac.Constants;
@@ -38,21 +40,20 @@ import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
 import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
-import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.*;
+import org.apache.airavata.schemas.gfac.FileArrayType;
+import org.apache.airavata.schemas.gfac.StringArrayType;
+import org.apache.airavata.schemas.gfac.URIArrayType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -72,77 +73,84 @@ public class GFACSSHUtils {
      * @throws ApplicationSettingsException
      */
     public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        HostDescription registeredHost = jobExecutionContext.getApplicationContext().getHostDescription();
-        if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType) {
+        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
             logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-        } else if (registeredHost.getType() instanceof SSHHostType
-                || registeredHost.getType() instanceof GsisshHostType) {
-            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-            String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-            RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
-            requestData.setTokenId(credentialStoreToken);
-
-            ServerInfo serverInfo = new ServerInfo(null, registeredHost.getType().getHostAddress());
-            Cluster pbsCluster = null;
+        } else if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
             try {
-                TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
-                String installedParentPath = ((HpcApplicationDeploymentType)
-                        jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
-                if (installedParentPath == null) {
-                    installedParentPath = "/";
-                }
+                AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+                SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
+                if (sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
+                    SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+                    String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+                    RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
+                    requestData.setTokenId(credentialStoreToken);
 
-                SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials in to this object,data will be used
-                serverInfo.setUserName(credentials.getPortalUserName());
-                jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
-                // inside the pbsCluser object
+                    ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
 
-                String key = credentials.getPortalUserName() + registeredHost.getType().getHostAddress() +
-                        serverInfo.getPort();
-                boolean recreate = false;
-                synchronized (clusters) {
-                    if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                        recreate = true;
-                    } else if (clusters.containsKey(key)) {
-                        int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                        if (clusters.get(key).get(i).getSession().isConnected()) {
-                            pbsCluster = clusters.get(key).get(i);
-                        } else {
-                            clusters.get(key).remove(i);
-                            recreate = true;
+                    Cluster pbsCluster = null;
+                    try {
+                        TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
+                        String installedParentPath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
+                        if (installedParentPath == null) {
+                            installedParentPath = "/";
                         }
-                        if (!recreate) {
-                            try {
-                                pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
-                            } catch (Exception e) {
-                                clusters.get(key).remove(i);
-                                logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                maxClusterCount++;
-                                recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+
+                        SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials in to this object,data will be used
+                        serverInfo.setUserName(credentials.getPortalUserName());
+                        jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
+                        // inside the pbsCluster object
+
+                        String key = credentials.getPortalUserName() + jobExecutionContext.getHostName() + serverInfo.getPort();
+                        boolean recreate = false;
+                        synchronized (clusters) {
+                            if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+                                recreate = true;
+                            } else if (clusters.containsKey(key)) {
+                                int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+                                if (clusters.get(key).get(i).getSession().isConnected()) {
+                                    pbsCluster = clusters.get(key).get(i);
+                                } else {
+                                    clusters.get(key).remove(i);
+                                    recreate = true;
+                                }
+                                if (!recreate) {
+                                    try {
+                                        pbsCluster.listDirectory("~/"); // it's hard to trust the isConnected() method, so we try to connect; if that works we are good, else we recreate
+                                    } catch (Exception e) {
+                                        clusters.get(key).remove(i);
+                                        logger.info("Connection found the connection map is expired, so we create from the scratch");
+                                        maxClusterCount++;
+                                        recreate = true; // recreate the pbsCluster if any exception occurs during the connection attempt
+                                    }
+                                }
+                                logger.info("Re-using the same connection used with the connection string:" + key);
+                            } else {
+                                recreate = true;
+                            }
+                            if (recreate) {
+                                pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
+                                        CommonUtils.getPBSJobManager(installedParentPath));
+                                List<Cluster> pbsClusters = null;
+                                if (!(clusters.containsKey(key))) {
+                                    pbsClusters = new ArrayList<Cluster>();
+                                } else {
+                                    pbsClusters = clusters.get(key);
+                                }
+                                pbsClusters.add(pbsCluster);
+                                clusters.put(key, pbsClusters);
                             }
                         }
-                        logger.info("Re-using the same connection used with the connection string:" + key);
-                    } else {
-                        recreate = true;
-                    }
-                    if (recreate) {
-                        pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
-                                    CommonUtils.getPBSJobManager(installedParentPath));
-                        List<Cluster> pbsClusters = null;
-                        if (!(clusters.containsKey(key))) {
-                            pbsClusters = new ArrayList<Cluster>();
-                        } else {
-                            pbsClusters = clusters.get(key);
-                        }
-                        pbsClusters.add(pbsCluster);
-                        clusters.put(key, pbsClusters);
+                    } catch (Exception e) {
+                        throw new GFacException("Error occurred...", e);
                     }
+                    sshSecurityContext.setPbsCluster(pbsCluster);
+                    jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT, sshSecurityContext);
                 }
-            } catch (Exception e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+            } catch (AppCatalogException e) {
+                throw new GFacException("Error while getting SSH Submission object from app catalog", e);
             }
-            sshSecurityContext.setPbsCluster(pbsCluster);
-            jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT+"-"+registeredHost.getType().getHostAddress(), sshSecurityContext);
         }
     }
 
@@ -154,76 +162,75 @@ public class GFACSSHUtils {
      * @throws ApplicationSettingsException
      */
     public static void addSecurityContext(JobExecutionContext jobExecutionContext,SSHAuthWrapper sshAuth) throws GFacException, ApplicationSettingsException {
-            try {
-                if(sshAuth== null) {
-                    throw new GFacException("Error adding security Context, because sshAuthWrapper is null");
-                }
-                SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-                Cluster pbsCluster = null;
-                String key=sshAuth.getKey();
-                boolean recreate = false;
-                synchronized (clusters) {
-                    if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                        recreate = true;
-                    } else if (clusters.containsKey(key)) {
-                        int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                        if (clusters.get(key).get(i).getSession().isConnected()) {
-                            pbsCluster = clusters.get(key).get(i);
-                        } else {
-                            clusters.get(key).remove(i);
-                            recreate = true;
-                        }
-                        if (!recreate) {
-                            try {
-                                pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
-                            } catch (Exception e) {
-                                clusters.get(key).remove(i);
-                                logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                maxClusterCount++;
-                                recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
-                            }
-                        }
-                        logger.info("Re-using the same connection used with the connection string:" + key);
+        try {
+            if(sshAuth== null) {
+                throw new GFacException("Error adding security Context, because sshAuthWrapper is null");
+            }
+            SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+            Cluster pbsCluster = null;
+            String key=sshAuth.getKey();
+            boolean recreate = false;
+            synchronized (clusters) {
+                if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+                    recreate = true;
+                } else if (clusters.containsKey(key)) {
+                    int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+                    if (clusters.get(key).get(i).getSession().isConnected()) {
+                        pbsCluster = clusters.get(key).get(i);
                     } else {
+                        clusters.get(key).remove(i);
                         recreate = true;
                     }
-                    if (recreate) {
-                        pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),null);
-                        key = sshAuth.getKey();
-                        List<Cluster> pbsClusters = null;
-                        if (!(clusters.containsKey(key))) {
-                            pbsClusters = new ArrayList<Cluster>();
-                        } else {
-                            pbsClusters = clusters.get(key);
+                    if (!recreate) {
+                        try {
+                            pbsCluster.listDirectory("~/"); // it's hard to trust the isConnected() method, so we try to connect; if that works we are good, else we recreate
+                        } catch (Exception e) {
+                            clusters.get(key).remove(i);
+                            logger.info("Connection found the connection map is expired, so we create from the scratch");
+                            maxClusterCount++;
+                            recreate = true; // recreate the pbsCluster if any exception occurs during the connection attempt
                         }
-                        pbsClusters.add(pbsCluster);
-                        clusters.put(key, pbsClusters);
                     }
+                    logger.info("Re-using the same connection used with the connection string:" + key);
+                } else {
+                    recreate = true;
+                }
+                if (recreate) {
+                    pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),null);
+                    key = sshAuth.getKey();
+                    List<Cluster> pbsClusters = null;
+                    if (!(clusters.containsKey(key))) {
+                        pbsClusters = new ArrayList<Cluster>();
+                    } else {
+                        pbsClusters = clusters.get(key);
+                    }
+                    pbsClusters.add(pbsCluster);
+                    clusters.put(key, pbsClusters);
                 }
-                sshSecurityContext.setPbsCluster(pbsCluster);
-                jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT+key, sshSecurityContext);
-            } catch (Exception e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
             }
+            sshSecurityContext.setPbsCluster(pbsCluster);
+            jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT+key, sshSecurityContext);
+        } catch (Exception e) {
+            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+        }
     }
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext,
-                                                    ApplicationDeploymentDescriptionType app, Cluster cluster) {
+
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
         JobDescriptor jobDescriptor = new JobDescriptor();
         // this is common for any application descriptor
         jobDescriptor.setCallBackIp(ServerSettings.getIp());
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(app.getInputDataDirectory());
-        jobDescriptor.setOutputDirectory(app.getOutputDataDirectory());
-        jobDescriptor.setExecutablePath(app.getExecutableLocation());
-        jobDescriptor.setStandardOutFile(app.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(app.getStandardError());
+        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
+        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
+        jobDescriptor.setExecutablePath(jobExecutionContext.getApplicationContext()
+                .getApplicationDeploymentDescription().getExecutablePath());
+        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
+        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
         int i = random.nextInt(Integer.MAX_VALUE);
         jobDescriptor.setJobName(String.valueOf(i + 99999999));
-        jobDescriptor.setWorkingDirectory(app.getStaticWorkingDirectory());
-
-
+        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
         List<String> inputValues = new ArrayList<String>();
         MessageContext input = jobExecutionContext.getInMessageContext();
         Map<String, Object> inputs = input.getParameters();
@@ -249,51 +256,6 @@ public class GFACSSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
-        // this part will fill out the hpcApplicationDescriptor
-        if (app instanceof HpcApplicationDeploymentType) {
-            HpcApplicationDeploymentType applicationDeploymentType
-                    = (HpcApplicationDeploymentType) app;
-            jobDescriptor.setUserName(((GSISSHAbstractCluster) cluster).getServerInfo().getUserName());
-            jobDescriptor.setShellName("/bin/bash");
-            jobDescriptor.setAllEnvExport(true);
-            jobDescriptor.setMailOptions("n");
-            jobDescriptor.setNodes(applicationDeploymentType.getNodeCount());
-            jobDescriptor.setProcessesPerNode(applicationDeploymentType.getProcessorsPerNode());
-            jobDescriptor.setMaxWallTime(String.valueOf(applicationDeploymentType.getMaxWallTime()));
-            jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
-            jobDescriptor.setCPUCount(applicationDeploymentType.getCpuCount());
-            if (applicationDeploymentType.getProjectAccount() != null) {
-                if (applicationDeploymentType.getProjectAccount().getProjectAccountNumber() != null) {
-                    jobDescriptor.setAcountString(applicationDeploymentType.getProjectAccount().getProjectAccountNumber());
-                }
-            }
-            if (applicationDeploymentType.getQueue() != null) {
-                if (applicationDeploymentType.getQueue().getQueueName() != null) {
-                    jobDescriptor.setQueueName(applicationDeploymentType.getQueue().getQueueName());
-                }
-            }
-            jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            if (taskData != null && taskData.isSetTaskScheduling()) {
-                ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
-                if (computionnalResource.getNodeCount() > 0) {
-                    jobDescriptor.setNodes(computionnalResource.getNodeCount());
-                }
-                if (computionnalResource.getComputationalProjectAccount() != null) {
-                    jobDescriptor.setAcountString(computionnalResource.getComputationalProjectAccount());
-                }
-                if (computionnalResource.getQueueName() != null) {
-                    jobDescriptor.setQueueName(computionnalResource.getQueueName());
-                }
-                if (computionnalResource.getTotalCPUCount() > 0) {
-                    jobDescriptor.setProcessesPerNode(computionnalResource.getTotalCPUCount());
-                }
-                if (computionnalResource.getWallTimeLimit() > 0) {
-                    jobDescriptor.setMaxWallTime(String.valueOf(computionnalResource.getWallTimeLimit()));
-                }
-            }
-
-        }
         return jobDescriptor;
     }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
index e84848c..c65f386 100644
--- a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
+++ b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
@@ -1,252 +1,252 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.SecurityContext;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
-import org.apache.airavata.schemas.gfac.*;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-public class BigRed2TestWithSSHAuth {
-    private JobExecutionContext jobExecutionContext;
-
-    private String userName;
-    private String password;
-    private String passPhrase;
-    private String hostName;
-    private String workingDirectory;
-    private String privateKeyPath;
-    private String publicKeyPath;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-
-        System.out.println("Test case name " + this.getClass().getName());
-//        System.setProperty("ssh.host","bigred2.uits.iu.edu");        //default ssh host
-//        System.setProperty("ssh.user", "lginnali");
-//        System.setProperty("ssh.private.key.path", "/Users/lahirugunathilake/.ssh/id_dsa");
-//        System.setProperty("ssh.public.key.path", "/Users/lahirugunathilake/.ssh/id_dsa.pub");
-//        System.setProperty("ssh.working.directory", "/tmp");
-
-        this.hostName = "bigred2.uits.iu.edu";
-        this.hostName = System.getProperty("ssh.host");
-        this.userName = System.getProperty("ssh.username");
-        this.password = System.getProperty("ssh.password");
-        this.privateKeyPath = System.getProperty("private.ssh.key");
-        this.publicKeyPath = System.getProperty("public.ssh.key");
-        this.passPhrase = System.getProperty("ssh.keypass");
-        this.workingDirectory = System.getProperty("ssh.working.directory");
-
-
-         if (this.userName == null
-                || (this.password==null && (this.publicKeyPath == null || this.privateKeyPath == null)) || this.workingDirectory == null) {
-            System.out.println("########### In order to test you have to either username password or private,public keys");
-            System.out.println("Use -Dssh.username=xxx -Dssh.password=yyy -Dssh.keypass=zzz " +
-                    "-Dprivate.ssh.key -Dpublic.ssh.key -Dssh.working.directory ");
-        }
-        URL resource = BigRed2TestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        //have to set InFlwo Handlers and outFlowHandlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
-
-        /*
-        * Host
-        */
-        HostDescription host = new HostDescription(SSHHostType.type);
-        host.getType().setHostAddress(hostName);
-        host.getType().setHostName(hostName);
-        ((SSHHostType)host.getType()).setHpcResource(true);
-        /*
-        * App
-        */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-        * Use bat file if it is compiled on Windows
-        */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-        * Default tmp location
-        */
-        String tempDir = "/tmp";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-        app.setMaxWallTime(5);
-        app.setJobSubmitterCommand("aprun -n 1");
-        app.setInstalledParentPath("/opt/torque/torque-4.2.3.1/bin/");
-
-        /*
-        * Service
-        */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-
-                .size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext(app));
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
-        jobExecutionContext.setTaskData(new TaskDetails("11323"));
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-
-    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
-         try {
-
-        AuthenticationInfo authenticationInfo = null;
-        if (password != null) {
-            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
-        } else {
-            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
-                    this.passPhrase);
-        }
-        // Server info
-        ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
-
-        Cluster pbsCluster = null;
-        SSHSecurityContext sshSecurityContext = null;
-
-            JobManagerConfiguration pbsJobManager = CommonUtils.getPBSJobManager(app.getInstalledParentPath());
-            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, pbsJobManager);
-
-
-            sshSecurityContext = new SSHSecurityContext();
-            sshSecurityContext.setPbsCluster(pbsCluster);
-            sshSecurityContext.setUsername(userName);
-            sshSecurityContext.setKeyPass(passPhrase);
-            sshSecurityContext.setPrivateKeyLoc(privateKeyPath);
-             return sshSecurityContext;
-        } catch (SSHApiException e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-        }
-        return null;
-    }
-
-    @Test
-    public void testSSHProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobDescription());
-        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobID());
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.SecurityContext;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
+//import org.apache.airavata.schemas.gfac.*;
+//import org.testng.annotations.BeforeClass;
+//import org.testng.annotations.Test;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//public class BigRed2TestWithSSHAuth {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    private String userName;
+//    private String password;
+//    private String passPhrase;
+//    private String hostName;
+//    private String workingDirectory;
+//    private String privateKeyPath;
+//    private String publicKeyPath;
+//
+//    @BeforeClass
+//    public void setUp() throws Exception {
+//
+//        System.out.println("Test case name " + this.getClass().getName());
+////        System.setProperty("ssh.host","bigred2.uits.iu.edu");        //default ssh host
+////        System.setProperty("ssh.user", "lginnali");
+////        System.setProperty("ssh.private.key.path", "/Users/lahirugunathilake/.ssh/id_dsa");
+////        System.setProperty("ssh.public.key.path", "/Users/lahirugunathilake/.ssh/id_dsa.pub");
+////        System.setProperty("ssh.working.directory", "/tmp");
+//
+//        this.hostName = "bigred2.uits.iu.edu";
+//        this.hostName = System.getProperty("ssh.host");
+//        this.userName = System.getProperty("ssh.username");
+//        this.password = System.getProperty("ssh.password");
+//        this.privateKeyPath = System.getProperty("private.ssh.key");
+//        this.publicKeyPath = System.getProperty("public.ssh.key");
+//        this.passPhrase = System.getProperty("ssh.keypass");
+//        this.workingDirectory = System.getProperty("ssh.working.directory");
+//
+//
+//         if (this.userName == null
+//                || (this.password==null && (this.publicKeyPath == null || this.privateKeyPath == null)) || this.workingDirectory == null) {
+//            System.out.println("########### In order to test you have to either username password or private,public keys");
+//            System.out.println("Use -Dssh.username=xxx -Dssh.password=yyy -Dssh.keypass=zzz " +
+//                    "-Dprivate.ssh.key -Dpublic.ssh.key -Dssh.working.directory ");
+//        }
+//        URL resource = BigRed2TestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+////        gFacConfiguration.setMyProxyLifeCycle(3600);
+////        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
+////        gFacConfiguration.setMyProxyUser("*****");
+////        gFacConfiguration.setMyProxyPassphrase("*****");
+////        gFacConfiguration.setTrustedCertLocation("./certificates");
+////        //have to set InFlwo Handlers and outFlowHandlers
+////        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
+////        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
+//
+//        /*
+//        * Host
+//        */
+//        HostDescription host = new HostDescription(SSHHostType.type);
+//        host.getType().setHostAddress(hostName);
+//        host.getType().setHostName(hostName);
+//        ((SSHHostType)host.getType()).setHpcResource(true);
+//        /*
+//        * App
+//        */
+//        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
+//        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//
+//        app.setCpuCount(1);
+//        app.setJobType(JobTypeType.SERIAL);
+//        app.setNodeCount(1);
+//        app.setProcessorsPerNode(1);
+//
+//        /*
+//        * Use bat file if it is compiled on Windows
+//        */
+//        app.setExecutableLocation("/bin/echo");
+//
+//        /*
+//        * Default tmp location
+//        */
+//        String tempDir = "/tmp";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = tempDir + File.separator
+//                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
+//
+//        System.out.println(tempDir);
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "inputData");
+//        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
+//        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
+//        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
+//        app.setMaxWallTime(5);
+//        app.setJobSubmitterCommand("aprun -n 1");
+//        app.setInstalledParentPath("/opt/torque/torque-4.2.3.1/bin/");
+//
+//        /*
+//        * Service
+//        */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//
+//                .size()]);
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        // Adding security context
+//        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext(app));
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//        applicationContext.setHostDescription(host);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
+//        jobExecutionContext.setTaskData(new TaskDetails("11323"));
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//
+//    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
+//         try {
+//
+//        AuthenticationInfo authenticationInfo = null;
+//        if (password != null) {
+//            authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
+//        } else {
+//            authenticationInfo = new DefaultPublicKeyFileAuthentication(this.publicKeyPath, this.privateKeyPath,
+//                    this.passPhrase);
+//        }
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
+//
+//        Cluster pbsCluster = null;
+//        SSHSecurityContext sshSecurityContext = null;
+//
+//            JobManagerConfiguration pbsJobManager = CommonUtils.getPBSJobManager(app.getInstalledParentPath());
+//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, pbsJobManager);
+//
+//
+//            sshSecurityContext = new SSHSecurityContext();
+//            sshSecurityContext.setPbsCluster(pbsCluster);
+//            sshSecurityContext.setUsername(userName);
+//            sshSecurityContext.setKeyPass(passPhrase);
+//            sshSecurityContext.setPrivateKeyLoc(privateKeyPath);
+//             return sshSecurityContext;
+//        } catch (SSHApiException e) {
+//            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//        }
+//        return null;
+//    }
+//
+//    @Test
+//    public void testSSHProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobDescription());
+//        org.junit.Assert.assertNotNull(jobExecutionContext.getJobDetails().getJobID());
+//    }
+//
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
index 5cb1200..b115b6c 100644
--- a/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
+++ b/modules/gfac/gfac-ssh/src/test/java/org/apache/airavata/core/gfac/services/impl/SSHProviderTestWithSSHAuth.java
@@ -1,172 +1,172 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class SSHProviderTestWithSSHAuth {
-	private JobExecutionContext jobExecutionContext;
-    @Before
-    public void setUp() throws Exception {
-
-    	URL resource = SSHProviderTestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
-//        gFacConfiguration.s
-        //have to set InFlwo Handlers and outFlowHandlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription(SSHHostType.type);
-        host.getType().setHostName("bigred");
-        host.getType().setHostAddress("bigred2.uits.iu.edu");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoSSH");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-         * Job location
-        */
-        String tempDir = "/tmp";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = tempDir + File.separator
-                + "EchoSSH" + "_" + date + "_" + UUID.randomUUID();
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("EchoSSH");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-
-        // Add security context
-        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext());
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.MappingFactory;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.SSHHostType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.apache.commons.lang.SystemUtils;
+//import org.junit.Assert;
+//import org.junit.Before;
+//import org.junit.Test;
+//
+//public class SSHProviderTestWithSSHAuth {
+//	private JobExecutionContext jobExecutionContext;
+//    @Before
+//    public void setUp() throws Exception {
+//
+//    	URL resource = SSHProviderTestWithSSHAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()),null);
+////        gFacConfiguration.s
+//        //have to set InFlwo Handlers and outFlowHandlers
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        HostDescription host = new HostDescription(SSHHostType.type);
+//        host.getType().setHostName("bigred");
+//        host.getType().setHostAddress("bigred2.uits.iu.edu");
+//        applicationContext.setHostDescription(host);
+//        /*
+//           * App
+//           */
+//        ApplicationDescription appDesc = new ApplicationDescription();
+//        ApplicationDeploymentDescriptionType app = appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoSSH");
+//        app.setApplicationName(name);
+//
+//        /*
+//           * Use bat file if it is compiled on Windows
+//           */
+//        if (SystemUtils.IS_OS_WINDOWS) {
+//            URL url = this.getClass().getClassLoader().getResource("echo.bat");
+//            app.setExecutableLocation(url.getFile());
+//        } else {
+//            //for unix and Mac
+//            app.setExecutableLocation("/bin/echo");
+//        }
+//
+//        /*
+//         * Job location
+//        */
+//        String tempDir = "/tmp";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = tempDir + File.separator
+//                + "EchoSSH" + "_" + date + "_" + UUID.randomUUID();
+//
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "input");
+//        app.setOutputDataDirectory(tempDir + File.separator + "output");
+//        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
+//        app.setStandardError(tempDir + File.separator + "echo.stderr");
+//
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//
+//        /*
+//           * Service
+//           */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("EchoSSH");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//                .size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//
+//        // Add security context
+//        jobExecutionContext.addSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT, getSecurityContext());
+//        /*
+//        * Host
+//        */
+//        applicationContext.setServiceDescription(serv);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
 //		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-	private SSHSecurityContext getSecurityContext() {
-		SSHSecurityContext context = new SSHSecurityContext();
-        context.setUsername("lginnali");
-        context.setPrivateKeyLoc("~/.ssh/id_dsa");
-        context.setKeyPass("i want to be free");
-		return context;
-	}
-
-    @Test
-    public void testLocalProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
-    }
-}
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//	private SSHSecurityContext getSecurityContext() {
+//		SSHSecurityContext context = new SSHSecurityContext();
+//        context.setUsername("lginnali");
+//        context.setPrivateKeyLoc("~/.ssh/id_dsa");
+//        context.setKeyPass("i want to be free");
+//		return context;
+//	}
+//
+//    @Test
+//    public void testLocalProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+//        Assert.assertEquals(MappingFactory.toString((ActualParameter)outMessageContext.getParameter("echo_output")), "hello");
+//    }
+//}


[07/50] [abbrv] airavata git commit: Integrated appCatalog for ssh and gsi modules, commented out old test classes, need to fix this

Posted by ch...@apache.org.
Integrated appCatalog for the ssh and gsi modules; commented out the old test classes, which still need to be fixed.


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/2dd94e63
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/2dd94e63
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/2dd94e63

Branch: refs/heads/master
Commit: 2dd94e633c1b2ceb17211f28aed9b044d9f7353a
Parents: bb4fe12
Author: shamrath <sh...@gmail.com>
Authored: Tue Nov 4 12:32:09 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Tue Nov 4 12:32:09 2014 -0500

----------------------------------------------------------------------
 .../org/apache/airavata/gfac/Scheduler.java     |   6 +-
 .../gfac/core/context/JobExecutionContext.java  |  31 +-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   3 +-
 .../gfac/gram/handler/GridFTPOutputHandler.java |   2 +-
 .../gfac/gsissh/util/GFACGSISSHUtils.java       |  47 +-
 .../impl/GSISSHProviderTestWithMyProxyAuth.java | 465 +++++++++--------
 .../ssh/handler/AdvancedSCPOutputHandler.java   |   6 +-
 .../ssh/handler/SSHDirectorySetupHandler.java   |   7 +-
 .../gfac/ssh/handler/SSHInputHandler.java       |   3 +-
 .../gfac/ssh/handler/SSHOutputHandler.java      | 142 +++---
 .../gfac/ssh/provider/impl/SSHProvider.java     |  69 +--
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    | 243 ++++-----
 .../services/impl/BigRed2TestWithSSHAuth.java   | 504 +++++++++----------
 .../impl/SSHProviderTestWithSSHAuth.java        | 342 ++++++-------
 14 files changed, 869 insertions(+), 1001 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 9e642fe..2bd612c 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -60,9 +60,9 @@ public class Scheduler {
         jobExecutionContext.setProvider(getProvider(jobExecutionContext));
         // TODO: Selecting the provider based on application description.
         jobExecutionContext.getGFacConfiguration().setInHandlers(jobExecutionContext.getProvider().getClass().getName(),
-                jobExecutionContext.getServiceName());
+                jobExecutionContext.getApplicationName());
         jobExecutionContext.getGFacConfiguration().setOutHandlers(jobExecutionContext.getProvider().getClass().getName(),
-        		 jobExecutionContext.getServiceName());
+        		 jobExecutionContext.getApplicationName());
         jobExecutionContext.getGFacConfiguration().setExecutionMode(getExecutionMode(jobExecutionContext));
     }
 
@@ -72,7 +72,7 @@ public class Scheduler {
      * @return GFacProvider instance.
      */
     private static GFacProvider getProvider(JobExecutionContext jobExecutionContext) throws GFacException {
-        String applicationName = jobExecutionContext.getServiceName();
+        String applicationName = jobExecutionContext.getApplicationName();
 
         URL resource = Scheduler.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
         DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index d5e8daa..d344e8b 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -139,7 +139,7 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     // a scientific application(or algorithm) as a service. Service name is there to identify to
     // which service description we should refer during the execution of the current job represented
     // by this context instance.
-    private String serviceName;
+    private String applicationName;
 
     private String experimentID;
 
@@ -166,10 +166,10 @@ public class JobExecutionContext extends AbstractContext implements Serializable
      */
     private Map<String, SecurityContext> securityContext = new HashMap<String, SecurityContext>();
 
-    public JobExecutionContext(GFacConfiguration gFacConfiguration,String serviceName){
+    public JobExecutionContext(GFacConfiguration gFacConfiguration,String applicationName){
         this.gfacConfiguration = gFacConfiguration;
         notifier = new GFacNotifier();
-        setServiceName(serviceName);
+        setApplicationName(applicationName);
         outputFileList = new ArrayList<String>();
     }
 
@@ -238,12 +238,12 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.outHandlers = outHandlers;
     }
 
-    public String getServiceName() {
-        return serviceName;
+    public String getApplicationName() {
+        return applicationName;
     }
 
-    public void setServiceName(String serviceName) {
-        this.serviceName = serviceName;
+    public void setApplicationName(String applicationName) {
+        this.applicationName = applicationName;
     }
 
     public GFacNotifier getNotifier() {
@@ -274,15 +274,6 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.inPath = false;
     }
 
-//    public ContextHeaderDocument.ContextHeader getContextHeader() {
-//        return contextHeader;
-//    }
-//
-//    public void setContextHeader(ContextHeaderDocument.ContextHeader contextHeader) {
-//        this.contextHeader = contextHeader;
-//    }
-
-	
 	public SecurityContext getSecurityContext(String name) throws GFacException{
 		SecurityContext secContext = securityContext.get(name);
 		return secContext;
@@ -459,4 +450,12 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
         this.preferredDataMovementInterface = preferredDataMovementInterface;
     }
+
+    public String getExecutablePath() {
+        if (applicationContext == null || applicationContext.getApplicationDeploymentDescription() == null) {
+            return null;
+        } else {
+            return applicationContext.getApplicationDeploymentDescription().getExecutablePath();
+        }
+    }
 }
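
[Editor's note, not part of this patch: the getExecutablePath() added in the hunk above is a plain null-guard accessor. Below is a self-contained sketch of the same pattern using hypothetical stand-in types (DeploymentStub, ContextStub); the real classes are JobExecutionContext, ApplicationContext and the app-catalog ApplicationDeploymentDescription, which need a full GFac runtime to construct.]

    // Sketch only: mirrors the shape of the accessor added above.
    class DeploymentStub {
        private final String executablePath;
        DeploymentStub(String executablePath) { this.executablePath = executablePath; }
        String getExecutablePath() { return executablePath; }
    }

    class ContextStub {
        private DeploymentStub deployment;                     // may be null until set
        void setDeployment(DeploymentStub deployment) { this.deployment = deployment; }

        // Same null-guard shape as JobExecutionContext.getExecutablePath():
        // return null instead of throwing when no deployment is attached yet.
        String getExecutablePath() {
            return (deployment == null) ? null : deployment.getExecutablePath();
        }
    }

    public class NullGuardAccessorSketch {
        public static void main(String[] args) {
            ContextStub context = new ContextStub();
            System.out.println(context.getExecutablePath());   // null, no NullPointerException
            context.setDeployment(new DeploymentStub("/bin/echo"));
            System.out.println(context.getExecutablePath());   // /bin/echo
        }
    }

[Callers such as GFACGSISSHUtils, further down in this patch, rely on this accessor instead of reaching through the application context themselves.]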

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 656a291..0455f7e 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -269,7 +269,7 @@ public class BetterGfacImpl implements GFac,Watcher {
         GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), configurationProperties);
 
         // start constructing jobexecutioncontext
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, applicationInterfaceId);
+        jobExecutionContext = new JobExecutionContext(gFacConfiguration, applicationInterface.getApplicationName());
 
         // setting experiment/task/workflownode related information
         Experiment experiment = (Experiment) registry.get(RegistryModelType.EXPERIMENT, experimentID);
@@ -281,6 +281,7 @@ public class BetterGfacImpl implements GFac,Watcher {
 
 
         List<JobDetails> jobDetailsList = taskData.getJobDetailsList();
+        //FIXME: Following for loop only set last jobDetails element to the jobExecutionContext
         for(JobDetails jDetails:jobDetailsList){
             jobExecutionContext.setJobDetails(jDetails);
         }
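
[Editor's note, not part of this patch: the FIXME added above observes that the loop keeps only the last JobDetails element. One possible follow-up is sketched below, under the assumption that the most recent entry really is the one wanted; the helper class is hypothetical.]

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    // Hypothetical helper, sketch only: makes the "last element wins" behaviour
    // explicit instead of relying on the loop overwriting the field each pass.
    // Generic so the sketch compiles stand-alone; in GFac the element type would
    // be org.apache.airavata.model.workspace.experiment.JobDetails.
    public final class LastElementHelper {
        private LastElementHelper() {}

        static <T> T lastOrNull(List<T> items) {
            return (items == null || items.isEmpty()) ? null : items.get(items.size() - 1);
        }

        public static void main(String[] args) {
            System.out.println(lastOrNull(Arrays.asList("job-1", "job-2", "job-3"))); // job-3
            System.out.println(lastOrNull(Collections.emptyList()));                  // null
        }
    }

[The call site would then read jobExecutionContext.setJobDetails(LastElementHelper.lastOrNull(jobDetailsList)); whether the last entry is in fact the right one is exactly what the FIXME leaves open.]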

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
index a424da0..7e226ea 100644
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
@@ -133,7 +133,7 @@ public class GridFTPOutputHandler extends AbstractHandler {
                     }
 
                     String timeStampedServiceName = GFacUtils.createUniqueNameWithDate(jobExecutionContext
-                            .getServiceName());
+                            .getApplicationName());
                     File localStdOutFile = File.createTempFile(timeStampedServiceName, "stdout");
                     localStdErrFile = File.createTempFile(timeStampedServiceName, "stderr");
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index baca65c..07a14bf 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -181,7 +181,7 @@ public class GFACGSISSHUtils {
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
         jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
         jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
-        jobDescriptor.setExecutablePath(app.getExecutablePath());
+        jobDescriptor.setExecutablePath(jobExecutionContext.getExecutablePath());
         jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
         jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
@@ -214,51 +214,6 @@ public class GFACGSISSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
-        // this part will fill out the hpcApplicationDescriptor
-        if (app instanceof HpcApplicationDeploymentType) {
-            HpcApplicationDeploymentType applicationDeploymentType
-                    = (HpcApplicationDeploymentType) app;
-            jobDescriptor.setUserName(((GSISSHAbstractCluster)cluster).getServerInfo().getUserName());
-            jobDescriptor.setShellName("/bin/bash");
-            jobDescriptor.setAllEnvExport(true);
-            jobDescriptor.setMailOptions("n");
-            jobDescriptor.setNodes(applicationDeploymentType.getNodeCount());
-            jobDescriptor.setProcessesPerNode(applicationDeploymentType.getProcessorsPerNode());
-            jobDescriptor.setMaxWallTime(String.valueOf(applicationDeploymentType.getMaxWallTime()));
-            jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
-            jobDescriptor.setCPUCount(applicationDeploymentType.getCpuCount());
-            if (applicationDeploymentType.getProjectAccount() != null) {
-                if (applicationDeploymentType.getProjectAccount().getProjectAccountNumber() != null) {
-                    jobDescriptor.setAcountString(applicationDeploymentType.getProjectAccount().getProjectAccountNumber());
-                }
-            }
-            if (applicationDeploymentType.getQueue() != null) {
-                if (applicationDeploymentType.getQueue().getQueueName() != null) {
-                    jobDescriptor.setQueueName(applicationDeploymentType.getQueue().getQueueName());
-                }
-            }
-            jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            if (taskData != null && taskData.isSetTaskScheduling()) {
-                ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
-                if (computionnalResource.getNodeCount() > 0) {
-                    jobDescriptor.setNodes(computionnalResource.getNodeCount());
-                }
-                if (computionnalResource.getComputationalProjectAccount() != null) {
-                    jobDescriptor.setAcountString(computionnalResource.getComputationalProjectAccount());
-                }
-                if (computionnalResource.getQueueName() != null) {
-                    jobDescriptor.setQueueName(computionnalResource.getQueueName());
-                }
-                if (computionnalResource.getTotalCPUCount() > 0) {
-                    jobDescriptor.setProcessesPerNode(computionnalResource.getTotalCPUCount());
-                }
-                if (computionnalResource.getWallTimeLimit() > 0) {
-                    jobDescriptor.setMaxWallTime(String.valueOf(computionnalResource.getWallTimeLimit()));
-                }
-            }
-
-        }
         return jobDescriptor;
     }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
index 0774022..630cd5c 100644
--- a/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
@@ -1,236 +1,229 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.SecurityContext;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-public class GSISSHProviderTestWithMyProxyAuth {
-    private JobExecutionContext jobExecutionContext;
-
-    //FIXME: move job properties to configuration file
-    private static final String hostAddress = "trestles.sdsc.edu";
-    private static final String hostName = "trestles";
-    private String myProxyUserName;
-    private String myProxyPassword;
-    private String workingDirectory;
-    private String certificateLocation = "/Users/lahirugunathilake/Downloads/certificates";
-
-    @BeforeClass
-    public void setUp() throws Exception {
-//        System.setProperty("myproxy.user", "ogce");
-//        System.setProperty("myproxy.password", "");
-//        System.setProperty("basedir", "/Users/lahirugunathilake/Downloads");
-//        System.setProperty("gsi.working.directory", "/home/ogce");
-//        System.setProperty("gsi.certificate.path", "/Users/lahirugunathilake/Downloads/certificates");
-        certificateLocation = System.getProperty("trusted.cert.location");
-        myProxyUserName = System.getProperty("myproxy.username");
-        myProxyPassword = System.getProperty("myproxy.password");
-        workingDirectory = System.getProperty("gsi.working.directory");
-
-        if (myProxyUserName == null || myProxyPassword == null || certificateLocation == null) {
-            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
-                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
-            throw new Exception("Need my proxy user name password to run tests.");
-        }
-        URL resource = GSISSHProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        //have to set InFlwo Handlers and outFlowHandlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
-
-        /*
-        * Host
-        */
-        HostDescription host = new HostDescription(GsisshHostType.type);
-        host.getType().setHostAddress(hostAddress);
-        host.getType().setHostName(hostName);
-
-        /*
-        * App
-        */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-        ProjectAccountType projectAccountType = app.addNewProjectAccount();
-        projectAccountType.setProjectAccountNumber("sds128");
-
-        QueueType queueType = app.addNewQueue();
-        queueType.setQueueName("normal");
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-        * Use bat file if it is compiled on Windows
-        */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-        * Default tmp location
-        */
-        String tempDir = "/home/ogce/scratch/";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = workingDirectory + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-        app.setMaxWallTime(5);
-        app.setInstalledParentPath("/opt/torque/bin/");
-
-        /*
-        * Service
-        */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-
-                .size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext(app));
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
-        jobExecutionContext.setTaskData(new TaskDetails("11323"));
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
-        GSIAuthenticationInfo authenticationInfo
-                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
-                7512, 17280000, certificateLocation);
-
-        // Server info
-        ServerInfo serverInfo = new ServerInfo("ogce", "trestles.sdsc.edu");
-        Cluster pbsCluster = null;
-        try {
-            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
-        } catch (SSHApiException e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-        }
-        GSISecurityContext sshSecurityContext = new GSISecurityContext(pbsCluster);
-        return sshSecurityContext;
-    }
-    @Test
-    public void testGSISSHProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        System.out.println(jobExecutionContext.getJobDetails().getJobDescription());
-        System.out.println(jobExecutionContext.getJobDetails().getJobID());
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//import org.apache.aiaravata.application.catalog.data.model.ApplicationInterface;
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.SecurityContext;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.JobTypeType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.ProjectAccountType;
+//import org.apache.airavata.schemas.gfac.QueueType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.testng.annotations.BeforeClass;
+//import org.testng.annotations.Test;
+//
+//public class GSISSHProviderTestWithMyProxyAuth {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    //FIXME: move job properties to configuration file
+//    private static final String hostAddress = "trestles.sdsc.edu";
+//    private static final String hostName = "trestles";
+//    private String myProxyUserName;
+//    private String myProxyPassword;
+//    private String workingDirectory;
+//    private String certificateLocation = "/Users/lahirugunathilake/Downloads/certificates";
+//
+//    @BeforeClass
+//    public void setUp() throws Exception {
+////        System.setProperty("myproxy.user", "ogce");
+////        System.setProperty("myproxy.password", "");
+////        System.setProperty("basedir", "/Users/lahirugunathilake/Downloads");
+////        System.setProperty("gsi.working.directory", "/home/ogce");
+////        System.setProperty("gsi.certificate.path", "/Users/lahirugunathilake/Downloads/certificates");
+//        certificateLocation = System.getProperty("trusted.cert.location");
+//        myProxyUserName = System.getProperty("myproxy.username");
+//        myProxyPassword = System.getProperty("myproxy.password");
+//        workingDirectory = System.getProperty("gsi.working.directory");
+//
+//        if (myProxyUserName == null || myProxyPassword == null || certificateLocation == null) {
+//            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
+//                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
+//            throw new Exception("Need my proxy user name password to run tests.");
+//        }
+//        URL resource = GSISSHProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+//        /*
+//        * Host
+//        */
+//        HostDescription host = new HostDescription(GsisshHostType.type);
+//        host.getType().setHostAddress(hostAddress);
+//        host.getType().setHostName(hostName);
+//
+//        /*
+//        * App
+//        */
+//        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
+//        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//        ProjectAccountType projectAccountType = app.addNewProjectAccount();
+//        projectAccountType.setProjectAccountNumber("sds128");
+//
+//        QueueType queueType = app.addNewQueue();
+//        queueType.setQueueName("normal");
+//
+//        app.setCpuCount(1);
+//        app.setJobType(JobTypeType.SERIAL);
+//        app.setNodeCount(1);
+//        app.setProcessorsPerNode(1);
+//
+//        /*
+//        * Use bat file if it is compiled on Windows
+//        */
+//        app.setExecutableLocation("/bin/echo");
+//
+//        /*
+//        * Default tmp location
+//        */
+//        String tempDir = "/home/ogce/scratch/";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = workingDirectory + File.separator
+//                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
+//
+//        System.out.println(tempDir);
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "inputData");
+//        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
+//        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
+//        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
+//        app.setMaxWallTime(5);
+//        app.setInstalledParentPath("/opt/torque/bin/");
+//
+//        /*
+//        * Service
+//        */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//
+//                .size()]);
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        // Adding security context
+//        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext(app));
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//        applicationContext.setHostDescription(host);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
+//        jobExecutionContext.setTaskData(new TaskDetails("11323"));
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
+//        GSIAuthenticationInfo authenticationInfo
+//                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
+//                7512, 17280000, certificateLocation);
+//
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo("ogce", "trestles.sdsc.edu");
+//        Cluster pbsCluster = null;
+//        try {
+//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
+//        } catch (SSHApiException e) {
+//            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//        }
+//        GSISecurityContext sshSecurityContext = new GSISecurityContext(pbsCluster);
+//        return sshSecurityContext;
+//    }
+//    @Test
+//    public void testGSISSHProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        System.out.println(jobExecutionContext.getJobDetails().getJobDescription());
+//        System.out.println(jobExecutionContext.getJobDetails().getJobID());
+//    }
+//
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index ad2131e..7248367 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -128,10 +128,8 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                 }
             }
             pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                    .getApplicationDeploymentDescription().getType();
-            String standardError = app.getStandardError();
-            String standardOutput = app.getStandardOutput();
+            String standardError = jobExecutionContext.getStandardError();
+            String standardOutput = jobExecutionContext.getStandardOutput();
             super.invoke(jobExecutionContext);
             // Server info
             if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){
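
A note for readers following the refactoring: the hunk above is the pattern applied throughout this commit, replacing lookups on ApplicationDeploymentDescriptionType with the convenience getters on JobExecutionContext. A minimal sketch of the new accessor usage, assuming a handler that already holds a populated JobExecutionContext (imports as in the file above; checked exceptions broadened to a plain throws clause, and localOutputDir is a hypothetical destination, not part of the diff):

    // Sketch only: stdout/stderr paths now come straight from the JobExecutionContext.
    private void stageStdStreams(JobExecutionContext jobExecutionContext, String localOutputDir)
            throws Exception {
        Cluster cluster = ((SSHSecurityContext) jobExecutionContext
                .getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
        String standardOutput = jobExecutionContext.getStandardOutput();
        String standardError = jobExecutionContext.getStandardError();
        // Pull both streams back over SCP, as the handler above does after invoke().
        cluster.scpFrom(standardOutput, localOutputDir);
        cluster.scpFrom(standardError, localOutputDir);
    }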

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index 0be6820..f7cbcc0 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@ -73,11 +73,10 @@ public class SSHDirectorySetupHandler extends AbstractHandler {
         } else {
             log.info("Successfully retrieved the Security Context");
         }
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-            String workingDirectory = app.getScratchWorkingDirectory();
+            String workingDirectory = jobExecutionContext.getWorkingDir();
             cluster.makeDirectory(workingDirectory);
-            cluster.makeDirectory(app.getInputDataDirectory());
-            cluster.makeDirectory(app.getOutputDataDirectory());
+            cluster.makeDirectory(jobExecutionContext.getInputDir());
+            cluster.makeDirectory(jobExecutionContext.getOutputDir());
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();
             status.setTransferState(TransferState.DIRECTORY_SETUP);

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index b26e035..b0367f3 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@ -150,11 +150,10 @@ public class SSHInputHandler extends AbstractHandler {
     }
 
     private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
-            String targetFile = app.getInputDataDirectory() + File.separator + substring;
+            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
             if(paramValue.startsWith("scp:")){
             	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
             	cluster.scpThirdParty(paramValue, targetFile);
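
The input-staging change above now derives the remote target path from jobExecutionContext.getInputDir(). The hunk ends before the non-scp branch, so the sketch below fills that branch with an assumed cluster.scpTo(...) upload and is illustrative only (imports and types as in the file above):

    // Sketch of the staging dispatch; the else-branch is an assumption, since the
    // hunk above cuts off before the local-file case.
    private static String stageInputFile(Cluster cluster, JobExecutionContext jobExecutionContext,
                                         String paramValue) throws Exception {
        String fileName = paramValue.substring(paramValue.lastIndexOf(File.separator) + 1);
        String targetFile = jobExecutionContext.getInputDir() + File.separator + fileName;
        if (paramValue.startsWith("scp:")) {
            // Third-party copy: the source already lives on another SSH-reachable host.
            cluster.scpThirdParty(paramValue.substring(paramValue.indexOf(":") + 1), targetFile);
        } else {
            // Assumed local upload path; scpTo is taken to be (remoteDir, localFile).
            cluster.scpTo(jobExecutionContext.getInputDir(), paramValue);
        }
        return targetFile;
    }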

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index 328ad32..d80e92b 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@ -27,6 +27,8 @@ import java.util.*;
 import net.schmizz.sshj.connection.ConnectionException;
 import net.schmizz.sshj.transport.TransportException;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.Constants;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
@@ -44,6 +46,9 @@ import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.registry.cpi.ChildDataType;
 import org.apache.airavata.registry.cpi.RegistryModelType;
@@ -58,38 +63,6 @@ public class SSHOutputHandler extends AbstractHandler {
     private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
 
     public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-            // so attempting to get it from the registry
-            if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) { // this is because we don't have the right jobexecution context
-                // so attempting to get it from the registry
-                log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                TaskDetails taskData = null;
-                try {
-                    taskData = (TaskDetails) registry.get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                } catch (RegistryException e) {
-                    log.error("Error retrieving job details from Registry");
-                    throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                }
-                JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                String jobDescription = jobDetails.getJobDescription();
-                if (jobDescription != null) {
-                    JobDescriptor jobDescriptor = null;
-                    try {
-                        jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                    } catch (XmlException e1) {
-                        e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                    }
-                    applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                            jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                    applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                    applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                    applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                    applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                }
-            }
-        }
-
         try {
             if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
 
@@ -98,10 +71,10 @@ public class SSHOutputHandler extends AbstractHandler {
         } catch (Exception e) {
             log.error(e.getMessage());
             try {
- 				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
+                GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+            } catch (GFacException e1) {
+                log.error(e1.getLocalizedMessage());
+            }
             throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
         }
 
@@ -109,11 +82,9 @@ public class SSHOutputHandler extends AbstractHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
 
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
         Cluster cluster = null;
         try {
-             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
+            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
             if (cluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
@@ -143,19 +114,19 @@ public class SSHOutputHandler extends AbstractHandler {
 //            cluster.makeDirectory(outputDataDir);
             int i = 0;
             String stdOutStr = "";
-            while(stdOutStr.isEmpty()){ 		
-            try {
-            	cluster.scpFrom(app.getStandardOutput(), localStdOutFile.getAbsolutePath());
-                stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
-			} catch (Exception e) {
-				log.error(e.getLocalizedMessage());
-				Thread.sleep(2000);
-			}
-            i++;
-            if(i == 3) break;
+            while (stdOutStr.isEmpty()) {
+                try {
+                    cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                    stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
+                } catch (Exception e) {
+                    log.error(e.getLocalizedMessage());
+                    Thread.sleep(2000);
+                }
+                i++;
+                if (i == 3) break;
             }
             Thread.sleep(1000);
-            cluster.scpFrom(app.getStandardError(), localStdErrFile.getAbsolutePath());
+            cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
             Thread.sleep(1000);
 
             String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
@@ -177,72 +148,73 @@ public class SSHOutputHandler extends AbstractHandler {
                 ActualParameter actualParameter = (ActualParameter) output.get(paramName);
                 if ("URI".equals(actualParameter.getType().getType().toString())) {
                     List<String> outputList = null;
-                    int retry=3;
-                    while(retry>0){
-                    	 outputList = cluster.listDirectory(app.getOutputDataDirectory());
-                    	 if(outputList.size() > 0){
-                    		 break;
-                    	 }	
-                    	 retry--;
-                    	 Thread.sleep(2000);
+                    int retry = 3;
+                    while (retry > 0) {
+                        outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
+                        if (outputList.size() > 0) {
+                            break;
+                        }
+                        retry--;
+                        Thread.sleep(2000);
                     }
-                  
+
                     if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 0) {
-                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr,outputArray);
+                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                         Set<String> strings = output.keySet();
                         outputArray.clear();
                         for (String key : strings) {
                             ActualParameter actualParameter1 = (ActualParameter) output.get(key);
                             if ("URI".equals(actualParameter1.getType().getType().toString())) {
-                              	String downloadFile = MappingFactory.toString(actualParameter1);
-                            	cluster.scpFrom(downloadFile, outputDataDir);
-                            	String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-                            	String localFile = outputDataDir +  File.separator +fileName;
-								jobExecutionContext.addOutputFile(localFile);
-								MappingFactory.fromString(actualParameter1, localFile);
-								DataObjectType dataObjectType = new DataObjectType();
+                                String downloadFile = MappingFactory.toString(actualParameter1);
+                                cluster.scpFrom(downloadFile, outputDataDir);
+                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
+                                String localFile = outputDataDir + File.separator + fileName;
+                                jobExecutionContext.addOutputFile(localFile);
+                                MappingFactory.fromString(actualParameter1, localFile);
+                                DataObjectType dataObjectType = new DataObjectType();
                                 dataObjectType.setValue(localFile);
                                 dataObjectType.setKey(key);
                                 dataObjectType.setType(DataType.URI);
                                 outputArray.add(dataObjectType);
                             }
                         }
-                    
+
                         break;
-                    } else if( outputList.size() == 0) {//FIXME: Ultrascan case
+                    } else if (outputList.size() == 0) {//FIXME: Ultrascan case
                         String valueList = outputList.get(0);
-                        cluster.scpFrom(app.getOutputDataDirectory() + File.separator + valueList, outputDataDir);
+                        cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                         String outputPath = outputDataDir + File.separator + valueList;
-						jobExecutionContext.addOutputFile(outputPath);
-						MappingFactory.fromString(actualParameter, outputPath);
-						DataObjectType dataObjectType = new DataObjectType();
+                        jobExecutionContext.addOutputFile(outputPath);
+                        MappingFactory.fromString(actualParameter, outputPath);
+                        DataObjectType dataObjectType = new DataObjectType();
                         dataObjectType.setValue(outputPath);
                         dataObjectType.setKey(paramName);
                         dataObjectType.setType(DataType.URI);
                         outputArray.add(dataObjectType);
                     }
                 } else {
-                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr,outputArray);
+                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                 }
             }
             if (outputArray == null || outputArray.isEmpty()) {
-            	log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-            	if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null){
-            		throw new GFacHandlerException(
-                        "Empty Output returned from the Application, Double check the application"
-                                + "and ApplicationDescriptor output Parameter Names");
-            	}
+                log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
+                if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
+                    throw new GFacHandlerException(
+                            "Empty Output returned from the Application, Double check the application"
+                                    + "and ApplicationDescriptor output Parameter Names");
+                }
             }
-            app.setStandardError(localStdErrFile.getAbsolutePath());
-            app.setStandardOutput(localStdOutFile.getAbsolutePath());
-            app.setOutputDataDirectory(outputDataDir);
+            // FIXME: why do we set standard error, output, and outputDirectory again?
+//            app.setStandardError(localStdErrFile.getAbsolutePath());
+//            app.setStandardOutput(localStdOutFile.getAbsolutePath());
+//            app.setOutputDataDirectory(outputDataDir);
             status.setTransferState(TransferState.DOWNLOAD);
             detail.setTransferStatus(status);
             detail.setTransferDescription(outputDataDir);
             registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
             registry.add(ChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-            
-        }catch (Exception e) {
+
+        } catch (Exception e) {
             try {
                 status.setTransferState(TransferState.FAILED);
                 detail.setTransferStatus(status);
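
The reformatted output handler above bounds the stdout fetch to three attempts with a two-second back-off before falling through. Pulled out into a hypothetical helper for illustration (cluster, localStdOutFile, and log as in the hunk; this is a sketch, not the handler verbatim):

    // Bounded retry: give the job a moment to flush stdout before giving up (3 tries, 2 s apart).
    private String fetchStdOutWithRetry(Cluster cluster, JobExecutionContext jobExecutionContext,
                                        File localStdOutFile) throws Exception {
        String stdOutStr = "";
        int attempts = 0;
        while (stdOutStr.isEmpty() && attempts < 3) {
            try {
                cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
                stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
            } catch (Exception e) {
                log.error(e.getLocalizedMessage());
                Thread.sleep(2000);
            }
            attempts++;
        }
        return stdOutStr;
    }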

http://git-wip-us.apache.org/repos/asf/airavata/blob/2dd94e63/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index 0527c78..573ddf0 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -51,6 +51,8 @@ import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.impl.RawCommandInfo;
 import org.apache.airavata.gsi.ssh.impl.StandardOutReader;
+import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
@@ -86,16 +88,16 @@ public class SSHProvider extends AbstractProvider {
         }
         taskID = jobExecutionContext.getTaskData().getTaskID();
 
-        if (!((SSHHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getHpcResource()) {
-            jobID = "SSH_" + jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress() + "_" + Calendar.getInstance().getTimeInMillis();
+        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+            jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
 
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-            String remoteFile = app.getStaticWorkingDirectory() + File.separatorChar + Constants.EXECUTABLE_NAME;
+            String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
             details.setJobID(taskID);
             details.setJobDescription(remoteFile);
             jobExecutionContext.setJobDetails(details);
-            JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, app, null);
+            JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, null);
             details.setJobDescription(jobDescriptor.toXML());
 
             GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
@@ -114,16 +116,15 @@ public class SSHProvider extends AbstractProvider {
 
     public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         if (!hpcType) {
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
             try {
                 /*
                  * Execute
                  */
-                String execuable = app.getStaticWorkingDirectory() + File.separatorChar + Constants.EXECUTABLE_NAME;
-                details.setJobDescription(execuable);
+                String executable = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
+                details.setJobDescription(executable);
 
 //                GFacUtils.updateJobStatus(details, JobState.SUBMITTED);
-                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + execuable + "; " + execuable);
+                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
 
                 StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
 
@@ -139,10 +140,6 @@ public class SSHProvider extends AbstractProvider {
         } else {
             try {
                 jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-                HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                        getHostDescription().getType();
-                HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                        getApplicationDeploymentDescription().getType();
                 JobDetails jobDetails = new JobDetails();
                 try {
                     Cluster cluster = null;
@@ -155,7 +152,7 @@ public class SSHProvider extends AbstractProvider {
                         log.info("Successfully retrieved the Security Context");
                     }
                     // This installed path is a mandetory field, because this could change based on the computing resource
-                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, app, cluster);
+                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, cluster);
                     jobDetails.setJobName(jobDescriptor.getJobName());
                     log.info(jobDescriptor.toXML());
 
@@ -172,14 +169,14 @@ public class SSHProvider extends AbstractProvider {
                     }
                     delegateToMonitorHandlers(jobExecutionContext);
                 } catch (SSHApiException e) {
-                    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                     GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                     throw new GFacProviderException(error, e);
                 } catch (Exception e) {
-                    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -199,8 +196,6 @@ public class SSHProvider extends AbstractProvider {
 
     public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
         JobDetails jobDetails = jobExecutionContext.getJobDetails();
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
         StringBuffer data = new StringBuffer();
         if (!hpcType) {
             throw new NotImplementedException();
@@ -225,14 +220,14 @@ public class SSHProvider extends AbstractProvider {
                 }
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
             } catch (SSHApiException e) {
-                String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                 log.error(error);
                 jobDetails.setJobID("none");
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                 GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 throw new GFacProviderException(error, e);
             } catch (Exception e) {
-                String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                 log.error(error);
                 jobDetails.setJobID("none");
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -279,40 +274,28 @@ public class SSHProvider extends AbstractProvider {
         }
     }
     private File createShellScript(JobExecutionContext context) throws IOException {
-        ApplicationDeploymentDescriptionType app = context.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
-        String uniqueDir = app.getApplicationName().getStringValue() + System.currentTimeMillis()
+        String uniqueDir = jobExecutionContext.getApplicationName() + System.currentTimeMillis()
                 + new Random().nextLong();
 
         File shellScript = File.createTempFile(uniqueDir, "sh");
         OutputStream out = new FileOutputStream(shellScript);
 
         out.write("#!/bin/bash\n".getBytes());
-        out.write(("cd " + app.getStaticWorkingDirectory() + "\n").getBytes());
-        out.write(("export " + Constants.INPUT_DATA_DIR_VAR_NAME + "=" + app.getInputDataDirectory() + "\n").getBytes());
-        out.write(("export " + Constants.OUTPUT_DATA_DIR_VAR_NAME + "=" + app.getOutputDataDirectory() + "\n")
+        out.write(("cd " + jobExecutionContext.getWorkingDir() + "\n").getBytes());
+        out.write(("export " + Constants.INPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getInputDir() + "\n").getBytes());
+        out.write(("export " + Constants.OUTPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getOutputDir() + "\n")
                 .getBytes());
         // get the env of the host and the application
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-
-        Map<String, String> nv = new HashMap<String, String>();
-        if (env != null) {
-            for (int i = 0; i < env.length; i++) {
-                String key = env[i].getName();
-                String value = env[i].getValue();
-                nv.put(key, value);
-            }
-        }
-        for (Entry<String, String> entry : nv.entrySet()) {
-            log.debug("Env[" + entry.getKey() + "] = " + entry.getValue());
-            out.write(("export " + entry.getKey() + "=" + entry.getValue() + "\n").getBytes());
-
+        List<SetEnvPaths> envPathList = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getSetEnvironment();
+        for (SetEnvPaths setEnvPaths : envPathList) {
+            log.debug("Env[" + setEnvPaths.getName() + "] = " + setEnvPaths.getValue());
+            out.write(("export " + setEnvPaths.getName() + "=" + setEnvPaths.getValue() + "\n").getBytes());
         }
 
         // prepare the command
         final String SPACE = " ";
         StringBuffer cmd = new StringBuffer();
-        cmd.append(app.getExecutableLocation());
+        cmd.append(jobExecutionContext.getExecutablePath());
         cmd.append(SPACE);
 
         MessageContext input = context.getInMessageContext();
@@ -338,11 +321,11 @@ public class SSHProvider extends AbstractProvider {
         cmd.append(SPACE);
         cmd.append("1>");
         cmd.append(SPACE);
-        cmd.append(app.getStandardOutput());
+        cmd.append(jobExecutionContext.getStandardOutput());
         cmd.append(SPACE);
         cmd.append("2>");
         cmd.append(SPACE);
-        cmd.append(app.getStandardError());
+        cmd.append(jobExecutionContext.getStandardError());
 
         String cmdStr = cmd.toString();
         log.info("Command = " + cmdStr);


[34/50] [abbrv] airavata git commit: Removed old thrift model classes from repo

Posted by ch...@apache.org.
Removed old thrift model classes from repo


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/0b95d3b0
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/0b95d3b0
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/0b95d3b0

Branch: refs/heads/master
Commit: 0b95d3b0d15ef479d489bba430721b82d15a8c00
Parents: d7f35e1
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 15:59:21 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 15:59:21 2014 -0500

----------------------------------------------------------------------
 .../workspace/experiment/DataObjectType.java    | 719 -------------------
 .../model/workspace/experiment/DataType.java    |  71 --
 2 files changed, 790 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/0b95d3b0/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataObjectType.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataObjectType.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataObjectType.java
deleted file mode 100644
index 04046ed..0000000
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataObjectType.java
+++ /dev/null
@@ -1,719 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Autogenerated by Thrift Compiler (0.9.1)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package org.apache.airavata.model.workspace.experiment;
-
-import org.apache.thrift.scheme.IScheme;
-import org.apache.thrift.scheme.SchemeFactory;
-import org.apache.thrift.scheme.StandardScheme;
-
-import org.apache.thrift.scheme.TupleScheme;
-import org.apache.thrift.protocol.TTupleProtocol;
-import org.apache.thrift.protocol.TProtocolException;
-import org.apache.thrift.EncodingUtils;
-import org.apache.thrift.TException;
-import org.apache.thrift.async.AsyncMethodCallback;
-import org.apache.thrift.server.AbstractNonblockingServer.*;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A structure  hold experiment input output
- * 
- */
-@SuppressWarnings("all") public class DataObjectType implements org.apache.thrift.TBase<DataObjectType, DataObjectType._Fields>, java.io.Serializable, Cloneable, Comparable<DataObjectType> {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DataObjectType");
-
-  private static final org.apache.thrift.protocol.TField KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("key", org.apache.thrift.protocol.TType.STRING, (short)1);
-  private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)2);
-  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)3);
-  private static final org.apache.thrift.protocol.TField META_DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("metaData", org.apache.thrift.protocol.TType.STRING, (short)4);
-
-  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
-  static {
-    schemes.put(StandardScheme.class, new DataObjectTypeStandardSchemeFactory());
-    schemes.put(TupleScheme.class, new DataObjectTypeTupleSchemeFactory());
-  }
-
-  private String key; // required
-  private String value; // optional
-  private DataType type; // optional
-  private String metaData; // optional
-
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    KEY((short)1, "key"),
-    VALUE((short)2, "value"),
-    /**
-     * 
-     * @see DataType
-     */
-    TYPE((short)3, "type"),
-    META_DATA((short)4, "metaData");
-
-    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
-    static {
-      for (_Fields field : EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // KEY
-          return KEY;
-        case 2: // VALUE
-          return VALUE;
-        case 3: // TYPE
-          return TYPE;
-        case 4: // META_DATA
-          return META_DATA;
-        default:
-          return null;
-      }
-    }
-
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
-    }
-
-    /**
-     * Find the _Fields constant that matches name, or null if its not found.
-     */
-    public static _Fields findByName(String name) {
-      return byName.get(name);
-    }
-
-    private final short _thriftId;
-    private final String _fieldName;
-
-    _Fields(short thriftId, String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  private _Fields optionals[] = {_Fields.VALUE,_Fields.TYPE,_Fields.META_DATA};
-  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.KEY, new org.apache.thrift.meta_data.FieldMetaData("key", org.apache.thrift.TFieldRequirementType.REQUIRED, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.VALUE, new org.apache.thrift.meta_data.FieldMetaData("value", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, DataType.class)));
-    tmpMap.put(_Fields.META_DATA, new org.apache.thrift.meta_data.FieldMetaData("metaData", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    metaDataMap = Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DataObjectType.class, metaDataMap);
-  }
-
-  public DataObjectType() {
-  }
-
-  public DataObjectType(
-    String key)
-  {
-    this();
-    this.key = key;
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public DataObjectType(DataObjectType other) {
-    if (other.isSetKey()) {
-      this.key = other.key;
-    }
-    if (other.isSetValue()) {
-      this.value = other.value;
-    }
-    if (other.isSetType()) {
-      this.type = other.type;
-    }
-    if (other.isSetMetaData()) {
-      this.metaData = other.metaData;
-    }
-  }
-
-  public DataObjectType deepCopy() {
-    return new DataObjectType(this);
-  }
-
-  @Override
-  public void clear() {
-    this.key = null;
-    this.value = null;
-    this.type = null;
-    this.metaData = null;
-  }
-
-  public String getKey() {
-    return this.key;
-  }
-
-  public void setKey(String key) {
-    this.key = key;
-  }
-
-  public void unsetKey() {
-    this.key = null;
-  }
-
-  /** Returns true if field key is set (has been assigned a value) and false otherwise */
-  public boolean isSetKey() {
-    return this.key != null;
-  }
-
-  public void setKeyIsSet(boolean value) {
-    if (!value) {
-      this.key = null;
-    }
-  }
-
-  public String getValue() {
-    return this.value;
-  }
-
-  public void setValue(String value) {
-    this.value = value;
-  }
-
-  public void unsetValue() {
-    this.value = null;
-  }
-
-  /** Returns true if field value is set (has been assigned a value) and false otherwise */
-  public boolean isSetValue() {
-    return this.value != null;
-  }
-
-  public void setValueIsSet(boolean value) {
-    if (!value) {
-      this.value = null;
-    }
-  }
-
-  /**
-   * 
-   * @see DataType
-   */
-  public DataType getType() {
-    return this.type;
-  }
-
-  /**
-   * 
-   * @see DataType
-   */
-  public void setType(DataType type) {
-    this.type = type;
-  }
-
-  public void unsetType() {
-    this.type = null;
-  }
-
-  /** Returns true if field type is set (has been assigned a value) and false otherwise */
-  public boolean isSetType() {
-    return this.type != null;
-  }
-
-  public void setTypeIsSet(boolean value) {
-    if (!value) {
-      this.type = null;
-    }
-  }
-
-  public String getMetaData() {
-    return this.metaData;
-  }
-
-  public void setMetaData(String metaData) {
-    this.metaData = metaData;
-  }
-
-  public void unsetMetaData() {
-    this.metaData = null;
-  }
-
-  /** Returns true if field metaData is set (has been assigned a value) and false otherwise */
-  public boolean isSetMetaData() {
-    return this.metaData != null;
-  }
-
-  public void setMetaDataIsSet(boolean value) {
-    if (!value) {
-      this.metaData = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, Object value) {
-    switch (field) {
-    case KEY:
-      if (value == null) {
-        unsetKey();
-      } else {
-        setKey((String)value);
-      }
-      break;
-
-    case VALUE:
-      if (value == null) {
-        unsetValue();
-      } else {
-        setValue((String)value);
-      }
-      break;
-
-    case TYPE:
-      if (value == null) {
-        unsetType();
-      } else {
-        setType((DataType)value);
-      }
-      break;
-
-    case META_DATA:
-      if (value == null) {
-        unsetMetaData();
-      } else {
-        setMetaData((String)value);
-      }
-      break;
-
-    }
-  }
-
-  public Object getFieldValue(_Fields field) {
-    switch (field) {
-    case KEY:
-      return getKey();
-
-    case VALUE:
-      return getValue();
-
-    case TYPE:
-      return getType();
-
-    case META_DATA:
-      return getMetaData();
-
-    }
-    throw new IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new IllegalArgumentException();
-    }
-
-    switch (field) {
-    case KEY:
-      return isSetKey();
-    case VALUE:
-      return isSetValue();
-    case TYPE:
-      return isSetType();
-    case META_DATA:
-      return isSetMetaData();
-    }
-    throw new IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof DataObjectType)
-      return this.equals((DataObjectType)that);
-    return false;
-  }
-
-  public boolean equals(DataObjectType that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_key = true && this.isSetKey();
-    boolean that_present_key = true && that.isSetKey();
-    if (this_present_key || that_present_key) {
-      if (!(this_present_key && that_present_key))
-        return false;
-      if (!this.key.equals(that.key))
-        return false;
-    }
-
-    boolean this_present_value = true && this.isSetValue();
-    boolean that_present_value = true && that.isSetValue();
-    if (this_present_value || that_present_value) {
-      if (!(this_present_value && that_present_value))
-        return false;
-      if (!this.value.equals(that.value))
-        return false;
-    }
-
-    boolean this_present_type = true && this.isSetType();
-    boolean that_present_type = true && that.isSetType();
-    if (this_present_type || that_present_type) {
-      if (!(this_present_type && that_present_type))
-        return false;
-      if (!this.type.equals(that.type))
-        return false;
-    }
-
-    boolean this_present_metaData = true && this.isSetMetaData();
-    boolean that_present_metaData = true && that.isSetMetaData();
-    if (this_present_metaData || that_present_metaData) {
-      if (!(this_present_metaData && that_present_metaData))
-        return false;
-      if (!this.metaData.equals(that.metaData))
-        return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    return 0;
-  }
-
-  @Override
-  public int compareTo(DataObjectType other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-
-    lastComparison = Boolean.valueOf(isSetKey()).compareTo(other.isSetKey());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetKey()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.key, other.key);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetValue()).compareTo(other.isSetValue());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetValue()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.value, other.value);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetType()).compareTo(other.isSetType());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetType()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetMetaData()).compareTo(other.isSetMetaData());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetMetaData()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.metaData, other.metaData);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("DataObjectType(");
-    boolean first = true;
-
-    sb.append("key:");
-    if (this.key == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.key);
-    }
-    first = false;
-    if (isSetValue()) {
-      if (!first) sb.append(", ");
-      sb.append("value:");
-      if (this.value == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.value);
-      }
-      first = false;
-    }
-    if (isSetType()) {
-      if (!first) sb.append(", ");
-      sb.append("type:");
-      if (this.type == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.type);
-      }
-      first = false;
-    }
-    if (isSetMetaData()) {
-      if (!first) sb.append(", ");
-      sb.append("metaData:");
-      if (this.metaData == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.metaData);
-      }
-      first = false;
-    }
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-    if (!isSetKey()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'key' is unset! Struct:" + toString());
-    }
-
-    // check for sub-struct validity
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-    try {
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private static class DataObjectTypeStandardSchemeFactory implements SchemeFactory {
-    public DataObjectTypeStandardScheme getScheme() {
-      return new DataObjectTypeStandardScheme();
-    }
-  }
-
-  private static class DataObjectTypeStandardScheme extends StandardScheme<DataObjectType> {
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot, DataObjectType struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TField schemeField;
-      iprot.readStructBegin();
-      while (true)
-      {
-        schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-          break;
-        }
-        switch (schemeField.id) {
-          case 1: // KEY
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.key = iprot.readString();
-              struct.setKeyIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 2: // VALUE
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.value = iprot.readString();
-              struct.setValueIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 3: // TYPE
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.type = DataType.findByValue(iprot.readI32());
-              struct.setTypeIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 4: // META_DATA
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.metaData = iprot.readString();
-              struct.setMetaDataIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          default:
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-        }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-      struct.validate();
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot, DataObjectType struct) throws org.apache.thrift.TException {
-      struct.validate();
-
-      oprot.writeStructBegin(STRUCT_DESC);
-      if (struct.key != null) {
-        oprot.writeFieldBegin(KEY_FIELD_DESC);
-        oprot.writeString(struct.key);
-        oprot.writeFieldEnd();
-      }
-      if (struct.value != null) {
-        if (struct.isSetValue()) {
-          oprot.writeFieldBegin(VALUE_FIELD_DESC);
-          oprot.writeString(struct.value);
-          oprot.writeFieldEnd();
-        }
-      }
-      if (struct.type != null) {
-        if (struct.isSetType()) {
-          oprot.writeFieldBegin(TYPE_FIELD_DESC);
-          oprot.writeI32(struct.type.getValue());
-          oprot.writeFieldEnd();
-        }
-      }
-      if (struct.metaData != null) {
-        if (struct.isSetMetaData()) {
-          oprot.writeFieldBegin(META_DATA_FIELD_DESC);
-          oprot.writeString(struct.metaData);
-          oprot.writeFieldEnd();
-        }
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
-    }
-
-  }
-
-  private static class DataObjectTypeTupleSchemeFactory implements SchemeFactory {
-    public DataObjectTypeTupleScheme getScheme() {
-      return new DataObjectTypeTupleScheme();
-    }
-  }
-
-  private static class DataObjectTypeTupleScheme extends TupleScheme<DataObjectType> {
-
-    @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, DataObjectType struct) throws org.apache.thrift.TException {
-      TTupleProtocol oprot = (TTupleProtocol) prot;
-      oprot.writeString(struct.key);
-      BitSet optionals = new BitSet();
-      if (struct.isSetValue()) {
-        optionals.set(0);
-      }
-      if (struct.isSetType()) {
-        optionals.set(1);
-      }
-      if (struct.isSetMetaData()) {
-        optionals.set(2);
-      }
-      oprot.writeBitSet(optionals, 3);
-      if (struct.isSetValue()) {
-        oprot.writeString(struct.value);
-      }
-      if (struct.isSetType()) {
-        oprot.writeI32(struct.type.getValue());
-      }
-      if (struct.isSetMetaData()) {
-        oprot.writeString(struct.metaData);
-      }
-    }
-
-    @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, DataObjectType struct) throws org.apache.thrift.TException {
-      TTupleProtocol iprot = (TTupleProtocol) prot;
-      struct.key = iprot.readString();
-      struct.setKeyIsSet(true);
-      BitSet incoming = iprot.readBitSet(3);
-      if (incoming.get(0)) {
-        struct.value = iprot.readString();
-        struct.setValueIsSet(true);
-      }
-      if (incoming.get(1)) {
-        struct.type = DataType.findByValue(iprot.readI32());
-        struct.setTypeIsSet(true);
-      }
-      if (incoming.get(2)) {
-        struct.metaData = iprot.readString();
-        struct.setMetaDataIsSet(true);
-      }
-    }
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/airavata/blob/0b95d3b0/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataType.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataType.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataType.java
deleted file mode 100644
index 8cd6a02..0000000
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataType.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Autogenerated by Thrift Compiler (0.9.1)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
- */
-package org.apache.airavata.model.workspace.experiment;
-
-
-import java.util.Map;
-import java.util.HashMap;
-import org.apache.thrift.TEnum;
-
-@SuppressWarnings("all") public enum DataType implements org.apache.thrift.TEnum {
-  STRING(0),
-  INTEGER(1),
-  URI(2),
-  STDOUT(3),
-  STDERR(4);
-
-  private final int value;
-
-  private DataType(int value) {
-    this.value = value;
-  }
-
-  /**
-   * Get the integer value of this enum value, as defined in the Thrift IDL.
-   */
-  public int getValue() {
-    return value;
-  }
-
-  /**
-   * Find a the enum type by its integer value, as defined in the Thrift IDL.
-   * @return null if the value is not found.
-   */
-  public static DataType findByValue(int value) { 
-    switch (value) {
-      case 0:
-        return STRING;
-      case 1:
-        return INTEGER;
-      case 2:
-        return URI;
-      case 3:
-        return STDOUT;
-      case 4:
-        return STDERR;
-      default:
-        return null;
-    }
-  }
-}
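
The removed enum above follows the standard Thrift TEnum contract: getValue() returns the integer defined in the IDL, and findByValue() maps an integer back to the constant, returning null for unknown values. A minimal usage sketch of that contract (illustrative only, not part of the commit):

    DataType byWireValue = DataType.findByValue(2);   // URI
    int wireValue = DataType.STDOUT.getValue();       // 3
    DataType unknown = DataType.findByValue(99);      // null - callers must handle this case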


[41/50] [abbrv] airavata git commit: OrchestratorSimpleClient - Replace DataObjectType with Input and Output DataObjectType

Posted by ch...@apache.org.
OrchestratorSimpleClient - Replace DataObjectType with Input and Output DataObjectType


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/82127c7b
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/82127c7b
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/82127c7b

Branch: refs/heads/master
Commit: 82127c7b3c0afd32fdec372b60a2ca747aa13b9e
Parents: a3351b7
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 19:10:52 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 19:10:52 2014 -0500

----------------------------------------------------------------------
 .../sample/OrchestratorClientSample.java        | 22 +++++++++++---------
 1 file changed, 12 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/82127c7b/modules/orchestrator/airavata-orchestrator-stubs/src/main/java/org/apache/airavata/orchestrator/sample/OrchestratorClientSample.java
----------------------------------------------------------------------
diff --git a/modules/orchestrator/airavata-orchestrator-stubs/src/main/java/org/apache/airavata/orchestrator/sample/OrchestratorClientSample.java b/modules/orchestrator/airavata-orchestrator-stubs/src/main/java/org/apache/airavata/orchestrator/sample/OrchestratorClientSample.java
index c9c862f..009d110 100644
--- a/modules/orchestrator/airavata-orchestrator-stubs/src/main/java/org/apache/airavata/orchestrator/sample/OrchestratorClientSample.java
+++ b/modules/orchestrator/airavata-orchestrator-stubs/src/main/java/org/apache/airavata/orchestrator/sample/OrchestratorClientSample.java
@@ -25,18 +25,20 @@ package org.apache.airavata.orchestrator.sample;
 //import org.apache.airavata.client.api.AiravataAPI;
 //import org.apache.airavata.client.api.exception.AiravataAPIInvocationException;
 //import org.apache.airavata.client.tools.DocumentCreator;
-import java.util.ArrayList;
-import java.util.List;
 
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService;
 import org.apache.thrift.TException;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class OrchestratorClientSample {
 //    private static DocumentCreator documentCreator;
     private static OrchestratorService.Client orchestratorClient;
@@ -86,17 +88,17 @@ public class OrchestratorClientSample {
         for (int i = 0; i < NUM_CONCURRENT_REQUESTS; i++) {
             Thread thread = new Thread() {
                 public void run() {
-                    List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-                    DataObjectType input = new DataObjectType();
-                    input.setKey("echo_input");
+                    List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+                    InputDataObjectType input = new InputDataObjectType();
+                    input.setName("echo_input");
                     input.setType(DataType.STRING);
                     input.setValue("echo_output=Hello World");
                     exInputs.add(input);
 
 
-                    List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-                    DataObjectType output = new DataObjectType();
-                    output.setKey("echo_output");
+                    List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+                    OutputDataObjectType output = new OutputDataObjectType();
+                    output.setName("echo_output");
                     output.setType(DataType.STRING);
                     output.setValue("");
                     exOut.add(output);
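
The hunk above shows the whole client-side migration: the generic DataObjectType with setKey() gives way to InputDataObjectType and OutputDataObjectType with setName(), both typed by the appinterface DataType enum. A condensed sketch of the resulting usage, using only the setters visible in the diff (the surrounding experiment creation code is omitted):

    List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
    InputDataObjectType input = new InputDataObjectType();
    input.setName("echo_input");                 // previously input.setKey(...)
    input.setType(DataType.STRING);              // org.apache.airavata.model.appcatalog.appinterface.DataType
    input.setValue("echo_output=Hello World");
    exInputs.add(input);

    List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
    OutputDataObjectType output = new OutputDataObjectType();
    output.setName("echo_output");               // previously output.setKey(...)
    output.setType(DataType.STRING);
    output.setValue("");
    exOut.add(output);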


[48/50] [abbrv] airavata git commit: merging gfac app catalog integration branch with master - AIRAVATA-1511

Posted by ch...@apache.org.
merging gfac app catalog integration branch with master - AIRAVATA-1511


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/36938926
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/36938926
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/36938926

Branch: refs/heads/master
Commit: 36938926632e490d0a64e14e13277f74fd41c4df
Parents: 4286c8c 0b867da
Author: Chathuri Wimalasena <ka...@gmail.com>
Authored: Tue Nov 11 11:14:09 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Tue Nov 11 11:14:09 2014 -0500

----------------------------------------------------------------------
 .../server/handler/AiravataServerHandler.java   |   72 +-
 .../java/org/apache/airavata/api/Airavata.java  |   58 +-
 .../main/resources/lib/airavata/Airavata.cpp    |    6 +-
 .../src/main/resources/lib/airavata/Airavata.h  |   16 +-
 .../lib/airavata/Airavata_server.skeleton.cpp   |    2 +-
 .../applicationInterfaceModel_types.cpp         |   10 +-
 .../airavata/applicationInterfaceModel_types.h  |    4 +-
 .../lib/airavata/computeResourceModel_types.cpp |  218 ++--
 .../lib/airavata/computeResourceModel_types.h   |   28 +-
 .../lib/airavata/experimentModel_types.cpp      |  521 ++++-----
 .../lib/airavata/experimentModel_types.h        |  124 +-
 .../gatewayResourceProfileModel_types.cpp       |   48 +-
 .../gatewayResourceProfileModel_types.h         |   19 +-
 .../lib/airavata/messagingEvents_types.cpp      |    6 +-
 .../lib/airavata/messagingEvents_types.h        |    9 +-
 .../resources/lib/Airavata/API/Airavata.php     |    4 +-
 .../Model/AppCatalog/AppInterface/Types.php     |    4 +
 .../Model/AppCatalog/ComputeResource/Types.php  |   29 +
 .../Model/AppCatalog/GatewayProfile/Types.php   |   20 +-
 .../Airavata/Model/Messaging/Event/Types.php    |    4 +-
 .../Model/Workspace/Experiment/Types.php        |  171 +--
 .../client/samples/CreateLaunchExperiment.java  |  260 +++--
 .../samples/CreateLaunchExperimentUS3.java      |  139 +--
 .../tools/RegisterSampleApplications.java       |    8 +-
 .../tools/RegisterSampleApplicationsUtils.java  |    3 +-
 .../model/appcatalog/appinterface/DataType.java |    8 +-
 .../appcatalog/computeresource/MonitorMode.java |   73 ++
 .../computeresource/SSHJobSubmission.java       |  121 +-
 .../ComputeResourcePreference.java              |   68 +-
 .../messaging/event/TaskOutputChangeEvent.java  |   40 +-
 .../workspace/experiment/DataObjectType.java    |  719 ------------
 .../model/workspace/experiment/DataType.java    |   71 --
 .../model/workspace/experiment/Experiment.java  |   76 +-
 .../model/workspace/experiment/TaskDetails.java |   76 +-
 .../experiment/WorkflowNodeDetails.java         |   76 +-
 .../model/util/ExperimentModelUtil.java         |   15 +-
 .../airavataAPI.thrift                          |    2 +-
 .../applicationInterfaceModel.thrift            |    4 +-
 .../computeResourceModel.thrift                 |   19 +-
 .../experimentModel.thrift                      |   32 +-
 .../gatewayResourceProfileModel.thrift          |    5 +-
 .../messagingEvents.thrift                      |    3 +-
 .../client/tools/DocumentCreatorUtils.java      |    4 +-
 .../data/impl/GwyResourceProfileImpl.java       |   19 +-
 .../data/util/AppCatalogThriftConversion.java   |    4 +-
 .../app/catalog/test/GatewayProfileTest.java    |    8 +-
 .../server/src/main/resources/gfac-config.xml   |    6 +-
 .../gfac/bes/handlers/AbstractSMSHandler.java   |   74 +-
 .../gfac/bes/provider/impl/BESProvider.java     |  378 +++---
 .../bes/security/UNICORESecurityContext.java    |    4 +-
 .../gfac/bes/utils/ApplicationProcessor.java    |  212 ++--
 .../org/apache/airavata/gfac/Constants.java     |    3 +
 .../org/apache/airavata/gfac/Scheduler.java     |  103 +-
 .../gfac/core/context/ApplicationContext.java   |   44 +-
 .../gfac/core/context/JobExecutionContext.java  |  185 ++-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |  405 +++----
 .../handler/AbstractRecoverableHandler.java     |    4 +-
 .../core/handler/AppDescriptorCheckHandler.java |   56 +-
 .../airavata/gfac/core/monitor/MonitorID.java   |   20 +-
 .../gfac/core/provider/utils/ProviderUtils.java |   18 +-
 .../airavata/gfac/core/utils/GFacUtils.java     | 1101 ++++++++++--------
 .../airavata/gfac/core/utils/OutputUtils.java   |   72 +-
 .../apache/airavata/job/GFacConfigXmlTest.java  |   78 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |   61 +-
 .../airavata/gfac/ec2/EC2ProviderTest.java      |  366 +++---
 .../gfac/gram/handler/GridFTPOutputHandler.java |    5 +-
 .../handler/GSISSHDirectorySetupHandler.java    |   11 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java |   86 +-
 .../gsissh/handler/GSISSHOutputHandler.java     |  116 +-
 .../gsissh/provider/impl/GSISSHProvider.java    |   77 +-
 .../gfac/gsissh/util/GFACGSISSHUtils.java       |  203 ++--
 .../impl/GSISSHProviderTestWithMyProxyAuth.java |  465 ++++----
 .../handler/LocalDirectorySetupHandler.java     |   19 +-
 .../gfac/local/provider/impl/LocalProvider.java |   54 +-
 .../gfac/local/utils/LocalProviderUtil.java     |   15 +-
 .../gfac/services/impl/LocalProviderTest.java   |  368 +++---
 .../airavata/gfac/monitor/HPCMonitorID.java     |   13 +-
 .../airavata/gfac/monitor/HostMonitorData.java  |   40 +-
 .../handlers/GridPullMonitorHandler.java        |    2 +-
 .../monitor/impl/pull/qstat/HPCPullMonitor.java |   28 +-
 .../impl/pull/qstat/ResourceConnection.java     |    6 +-
 .../monitor/impl/push/amqp/AMQPMonitor.java     |   57 +-
 .../airavata/gfac/monitor/util/CommonUtils.java |   32 +-
 .../apache/airavata/job/AMQPMonitorTest.java    |   64 +-
 .../job/QstatMonitorTestWithMyProxyAuth.java    |  344 +++---
 .../ssh/handler/AdvancedSCPOutputHandler.java   |   34 +-
 .../ssh/handler/SSHDirectorySetupHandler.java   |   11 +-
 .../gfac/ssh/handler/SSHInputHandler.java       |    5 +-
 .../gfac/ssh/handler/SSHOutputHandler.java      |  185 ++-
 .../gfac/ssh/provider/impl/SSHProvider.java     |   79 +-
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    |  300 +++--
 .../services/impl/BigRed2TestWithSSHAuth.java   |  504 ++++----
 .../impl/SSHProviderTestWithSSHAuth.java        |  342 +++---
 modules/gfac/pom.xml                            |    6 +-
 .../apache/airavata/integration/BaseCaseIT.java |   49 +-
 .../airavata/integration/DataRetrievalIT.java   |   15 +-
 .../airavata/integration/SimpleEchoIT.java      |   23 +-
 .../integration/tools/DocumentCreatorUtils.java |    4 +-
 .../sample/OrchestratorClientSample.java        |   22 +-
 .../registry/jpa/impl/ExperimentRegistry.java   |  126 +-
 .../registry/jpa/impl/RegistryImpl.java         |   13 +-
 .../jpa/utils/ThriftDataModelConversion.java    |  101 +-
 .../engine/interpretor/WorkflowInterpreter.java |   81 +-
 .../ui/experiment/LaunchApplicationWindow.java  |   58 +-
 .../WorkflowInterpreterLaunchWindow.java        |   17 +-
 .../airavata/gsi/ssh/api/job/JobDescriptor.java |    7 +
 106 files changed, 4861 insertions(+), 5542 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --cc airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index f039988,63fe5ad..2d7768b
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@@ -53,9 -56,9 +56,9 @@@ public class CreateLaunchExperiment 
      private static final String DEFAULT_GATEWAY = "default.registry.gateway";
      private static Airavata.Client airavataClient;
  
-     private static String echoAppId = "Echo_636b4530-6fb2-4c9e-998a-b41e648aa70f";
-     private static String wrfAppId = "WRF_d41bdc86-e280-4eb6-a045-708f69a8c116";
-     private static String amberAppId = "Amber_b23ee051-90d6-4892-827e-622a2f6c95ee";
 -    private static String echoAppId = "Echo_647ba0c5-64ef-4efe-9786-2d28f4d5acc9";
++    private static String echoAppId = "Echo_1869465f-f002-43a9-b243-c091f63ab059";
+     private static String wrfAppId = "WRF_a458df70-6808-4d5d-ae32-c49082f2a6cc";
+     private static String amberAppId = "Amber_1b99f73b-a88d-44e3-b04e-4f56ba95ed6f";
  
      private static String localHost = "localhost";
      private static String trestlesHostName = "trestles.sdsc.xsede.org";

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
index 1f84490,b2790c9..b2c40a7
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
@@@ -47,8 -47,7 +47,8 @@@ public class GSISSHDirectorySetupHandle
  
  	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
          try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
++            String hostAddress = jobExecutionContext.getHostName();
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
              }
          } catch (Exception e) {
@@@ -65,26 -64,24 +65,25 @@@
          makeDirectory(jobExecutionContext);
  	}
  	private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
 -		 Cluster cluster = null;
 -		try {
 -         cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
 -        if (cluster == null) {
 -        	 try {
 -  				GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
 -  			} catch (GFacException e1) {
 -  				 log.error(e1.getLocalizedMessage());
 -  			}
 -            throw new GFacHandlerException("Security context is not set properly");
 -        } else {
 -            log.info("Successfully retrieved the Security Context");
 -        }
 +        Cluster cluster = null;
 +        try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
++            String hostAddress = jobExecutionContext.getHostName();
 +            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
 +            if (cluster == null) {
 +                try {
 +                    GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
 +                } catch (GFacException e1) {
 +                    log.error(e1.getLocalizedMessage());
 +                }
 +                throw new GFacHandlerException("Security context is not set properly");
 +            } else {
 +                log.info("Successfully retrieved the Security Context");
 +            }
-         ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
  
-             String workingDirectory = app.getScratchWorkingDirectory();
+             String workingDirectory = jobExecutionContext.getWorkingDir();
              cluster.makeDirectory(workingDirectory);
-             cluster.makeDirectory(app.getInputDataDirectory());
-             cluster.makeDirectory(app.getOutputDataDirectory());
+             cluster.makeDirectory(jobExecutionContext.getInputDir());
+             cluster.makeDirectory(jobExecutionContext.getOutputDir());
              DataTransferDetails detail = new DataTransferDetails();
              TransferStatus status = new TransferStatus();
              status.setTransferState(TransferState.DIRECTORY_SETUP);
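
The change repeated across these gfac handlers is that the security context is now stored and looked up by host name (jobExecutionContext.getHostName()) instead of a fixed GSI_SECURITY_CONTEXT key, and the working, input, and output directories are read directly from the JobExecutionContext rather than from the application deployment descriptor. A minimal sketch assembled from the calls visible in the hunk above (exception handling and status publishing omitted):

    String hostName = jobExecutionContext.getHostName();
    if (jobExecutionContext.getSecurityContext(hostName) == null) {
        GFACGSISSHUtils.addSecurityContext(jobExecutionContext);  // registers a context keyed by host name
    }
    Cluster cluster = ((GSISecurityContext) jobExecutionContext
            .getSecurityContext(hostName)).getPbsCluster();
    cluster.makeDirectory(jobExecutionContext.getWorkingDir());   // directories now come from the context
    cluster.makeDirectory(jobExecutionContext.getInputDir());
    cluster.makeDirectory(jobExecutionContext.getOutputDir());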

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index 01dc712,39f8df1..c297b2a
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@@ -67,12 -69,11 +69,12 @@@ public class GSISSHInputHandler extend
          Cluster cluster = null;
          
          try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -        	if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
 -                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
 -            } else {
 -                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
++            String hostAddress = jobExecutionContext.getHostName();
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
 +                GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
              }
 +
 +            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
              if (cluster == null) {
                  throw new GFacException("Security context is not set properly");
              } else {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
index eff0164,a25e66a..68273b8
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
@@@ -67,39 -61,9 +61,9 @@@ public class GSISSHOutputHandler extend
          int oldIndex = 0;
          List<String> oldFiles = new ArrayList<String>();
          StringBuffer data = new StringBuffer("|");
-         String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
-         if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-             // so attempting to get it from the registry
-             if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) {
-                 log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                 ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                 TaskDetails taskData = null;
-                 try {
-                     taskData = (TaskDetails) jobExecutionContext.getRegistry().get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                 } catch (RegistryException e) {
-                     log.error("Error retrieving job details from Registry");
-                     throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                 }
-                 JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                 String jobDescription = jobDetails.getJobDescription();
-                 if (jobDescription != null) {
-                     JobDescriptor jobDescriptor = null;
-                     try {
-                         jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                     } catch (XmlException e1) {
-                         e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                     }
-                     applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                             jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                     applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                     applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                     applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                     applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                 }
-             }
-         }
++        String hostAddress = jobExecutionContext.getHostName();
          try {
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
 -
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
              }
          }  catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
index 87e0fb4,92a50e4..d26d31b
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
@@@ -61,8 -70,7 +70,8 @@@ public class GSISSHProvider extends Abs
      public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
          super.initialize(jobExecutionContext);
          try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
++            String hostAddress = jobExecutionContext.getHostName();
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
              }
          } catch (ApplicationSettingsException e) {
@@@ -77,16 -85,20 +86,19 @@@
          log.info("Invoking GSISSH Provider Invoke ...");
          StringBuffer data = new StringBuffer();
          jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-         HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                 getHostDescription().getType();
-         HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                 getApplicationDeploymentDescription().getType();
+         ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                 .getComputeResourceDescription();
+         ApplicationDeploymentDescription appDeployDesc = jobExecutionContext.getApplicationContext()
+                 .getApplicationDeploymentDescription();
          JobDetails jobDetails = new JobDetails();
          Cluster cluster = null;
-         
+ 
          try {
-             if (jobExecutionContext.getSecurityContext(host.getHostAddress()) != null) {
-                 cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(host.getHostAddress())).getPbsCluster();
+             AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+             SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                     jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
 -
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
 -                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
++            if (jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName()) != null) {
++                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName())).getPbsCluster();
              }
              if (cluster == null) {
                  throw new GFacProviderException("Security context is not set properly");
@@@ -209,15 -221,13 +221,14 @@@
      public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
          //To change body of implemented methods use File | Settings | File Templates.
          log.info("canceling the job status in GSISSHProvider!!!!!");
-         HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                 getHostDescription().getType();
          JobDetails jobDetails = jobExecutionContext.getJobDetails();
++        String hostName = jobExecutionContext.getHostName();
          try {
              Cluster cluster = null;
-             if (jobExecutionContext.getSecurityContext(host.getHostAddress()) == null) {
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
++            if (jobExecutionContext.getSecurityContext(hostName) == null) {
                  GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
              }
-             cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(host.getHostAddress())).getPbsCluster();
 -            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
++            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostName)).getPbsCluster();
              if (cluster == null) {
                  throw new GFacProviderException("Security context is not set properly");
              } else {
@@@ -256,8 -266,8 +267,9 @@@
      public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
          // have to implement the logic to recover a gfac failure
          log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
-         HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                 getHostDescription().getType();
+         ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                 .getComputeResourceDescription();
++        String hostName = jobExecutionContext.getHostName();
          String jobId = "";
          String jobDesc = "";
          try {
@@@ -299,7 -309,7 +311,7 @@@
              jobDetails.setJobDescription(jobDesc);
              jobDetails.setJobID(jobId);
              jobExecutionContext.setJobDetails(jobDetails);
-             if (jobExecutionContext.getSecurityContext(host.getHostAddress()) == null) {
 -            if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
++            if (jobExecutionContext.getSecurityContext(hostName) == null) {
                  try {
                      GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
                  } catch (ApplicationSettingsException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index 58efb56,3c2e839..45ef1b8
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@@ -160,10 -159,11 +159,11 @@@ public class GFACGSISSHUtils 
                          clusters.put(key, pbsClusters);
                      }
                  }
-             } catch (Exception e) {
-                 throw new GFacException("An error occurred while creating GSI security context", e);
+ 
 -                jobExecutionContext.addSecurityContext(Constants.GSI_SECURITY_CONTEXT, context);
++                jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), context);
              }
-             jobExecutionContext.addSecurityContext(registeredHost.getType().getHostAddress(), context);
+         } catch (Exception e) {
+             throw new GFacException("An error occurred while creating GSI security context", e);
          }
      }
  

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
index 3a15d39,c788ace..12b7ad9
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
@@@ -62,8 -63,7 +63,8 @@@ public class HPCMonitorID extends Monit
          this.authenticationInfo = authenticationInfo;
          if (this.authenticationInfo != null) {
              try {
-                 String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -                SecurityContext securityContext = jobExecutionContext.getSecurityContext(Constants.GSI_SECURITY_CONTEXT);
++                String hostAddress = jobExecutionContext.getHostName();
 +                SecurityContext securityContext = jobExecutionContext.getSecurityContext(hostAddress);
                  ServerInfo serverInfo = null;
                  if (securityContext != null) {
                      serverInfo = (((GSISecurityContext) securityContext).getPbsCluster()).getServerInfo();

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 9f93dc6,feaaa2d..25113fd
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@@ -25,6 -25,6 +25,7 @@@ import org.apache.airavata.common.logge
  import org.apache.airavata.common.logger.AiravataLoggerFactory;
  import org.apache.airavata.common.utils.MonitorPublisher;
  import org.apache.airavata.common.utils.ServerSettings;
++import org.apache.airavata.gfac.monitor.util.CommonUtils;
  import org.apache.airavata.commons.gfac.type.HostDescription;
  import org.apache.airavata.gfac.core.cpi.GFac;
  import org.apache.airavata.gfac.core.monitor.MonitorID;

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
index 121cc59,73c05b7..e7a081b
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
@@@ -49,13 -49,11 +49,13 @@@ public class ResourceConnection 
      public ResourceConnection(HostMonitorData hostMonitorData,AuthenticationInfo authInfo) throws SSHApiException {
          MonitorID monitorID = hostMonitorData.getMonitorIDs().get(0);
          try {
 -            GSISecurityContext securityContext = (GSISecurityContext) monitorID.getJobExecutionContext().getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT);
 +            GSISecurityContext securityContext = (GSISecurityContext)
-                     monitorID.getJobExecutionContext().getSecurityContext(monitorID.getHost().getType().getHostAddress());
++                    monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
              if(securityContext != null) {
                  cluster = (PBSCluster) securityContext.getPbsCluster();
              }else {
 -                SSHSecurityContext sshSecurityContext = (SSHSecurityContext) monitorID.getJobExecutionContext().getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT);
 +                SSHSecurityContext sshSecurityContext = (SSHSecurityContext)
-                         monitorID.getJobExecutionContext().getSecurityContext(monitorID.getHost().getType().getHostAddress());
++                        monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
                  cluster = (PBSCluster)sshSecurityContext.getPbsCluster();
              }
  
@@@ -70,8 -68,7 +70,8 @@@
      public ResourceConnection(HostMonitorData hostMonitorData) throws SSHApiException {
          MonitorID monitorID = hostMonitorData.getMonitorIDs().get(0);
          try {
 -            GSISecurityContext securityContext = (GSISecurityContext) monitorID.getJobExecutionContext().getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT);
 +            GSISecurityContext securityContext = (GSISecurityContext)
-                     monitorID.getJobExecutionContext().getSecurityContext(monitorID.getHost().getType().getHostAddress());
++                    monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
              cluster = (PBSCluster) securityContext.getPbsCluster();
  
              // we just use cluster configuration from the incoming request and construct a new cluster because for monitoring

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
index 6152505,a503154..219db22
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
@@@ -148,7 -156,7 +156,6 @@@ public class CommonUtils 
  
      /**
       * This method doesn't have to be synchronized because it will be invoked by HPCPullMonitor which already synchronized
--     * @param queue
       * @param monitorID
       * @throws AiravataMonitorException
       */
@@@ -169,11 -180,15 +176,10 @@@
                                      // could be different, thats why we check the jobID
                                      iterator2.remove();
                                      logger.infoId(monitorID.getJobID(), "Removed the jobId: {} JobName: {} from monitoring last " +
-                                             "status:{}", monitorID.getJobID(),monitorID.getJobName(), monitorID.getStatus().toString());
+                                             "status:{}", monitorID.getJobID(), monitorID.getJobName(), monitorID.getStatus().toString());
                                      if (iHostMonitorID.getMonitorIDs().size() == 0) {
                                          iterator1.remove();
-                                         logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getHost()
-                                                 .getType().getHostAddress());
+                                         logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getComputeResourceDescription().getHostName());
 -                                        if (hostMonitorData.size() == 0) {
 -                                            // no useful data so we have to remove the element from the queue
 -                                            queue.remove(next);
 -                                            logger.debug("Removed user {} from monitoring.", next.getUserName());
 -                                        }
                                      }
                                      return;
                                  }

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index c0416f5,e46dfa5..caefe7a
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@@ -110,10 -107,22 +107,23 @@@ public class AdvancedSCPOutputHandler e
                      this.passPhrase);
          }
          try {
-             ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                     .getApplicationDeploymentDescription().getType();
-             String standardError = app.getStandardError();
-             String standardOutput = app.getStandardOutput();
 -            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
++            String hostName = jobExecutionContext.getHostName();
++            if (jobExecutionContext.getSecurityContext(hostName) == null) {
+                 try {
+                     GFACSSHUtils.addSecurityContext(jobExecutionContext);
+                 } catch (ApplicationSettingsException e) {
+                     log.error(e.getMessage());
+                     try {
+          				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+          			} catch (GFacException e1) {
+          				 log.error(e1.getLocalizedMessage());
+          			}
+                     throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+                 }
+             }
 -            pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
++            pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(hostName)).getPbsCluster();
+             String standardError = jobExecutionContext.getStandardError();
+             String standardOutput = jobExecutionContext.getStandardOutput();
              super.invoke(jobExecutionContext);
              // Server info
              if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index 9dcdefe,f7cbcc0..1baed57
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@@ -46,8 -46,7 +46,8 @@@ public class SSHDirectorySetupHandler e
  
  	public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
          try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
++            String hostAddress = jobExecutionContext.getHostName();
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  GFACSSHUtils.addSecurityContext(jobExecutionContext);
              }
          } catch (Exception e) {
@@@ -68,8 -67,7 +68,8 @@@
  	private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
  		Cluster cluster = null;
  		try{
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -        cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
++            String hostAddress = jobExecutionContext.getHostName();
 +            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
          if (cluster == null) {
              throw new GFacHandlerException("Security context is not set properly");
          } else {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index c279378,b0367f3..dd27d6b
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@@ -66,8 -66,13 +66,8 @@@ public class SSHInputHandler extends Ab
          Cluster cluster = null;
          
          try {
-             String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -        	cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
 -            if (cluster == null) {
 -                throw new GFacException("Security context is not set properly");
 -            } else {
 -                log.info("Successfully retrieved the Security Context");
 -            }
 -            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
++            String hostAddress = jobExecutionContext.getHostName();
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  try {
                      GFACSSHUtils.addSecurityContext(jobExecutionContext);
                  } catch (ApplicationSettingsException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index 0780084,94f667e..bf01aff
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@@ -58,40 -53,9 +53,9 @@@ public class SSHOutputHandler extends A
      private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
  
      public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-         if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-             // so attempting to get it from the registry
-             if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) { // this is because we don't have the right jobexecution context
-                 // so attempting to get it from the registry
-                 log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                 ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                 TaskDetails taskData = null;
-                 try {
-                     taskData = (TaskDetails) registry.get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                 } catch (RegistryException e) {
-                     log.error("Error retrieving job details from Registry");
-                     throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                 }
-                 JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                 String jobDescription = jobDetails.getJobDescription();
-                 if (jobDescription != null) {
-                     JobDescriptor jobDescriptor = null;
-                     try {
-                         jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                     } catch (XmlException e1) {
-                         e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                     }
-                     applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                             jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                     applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                     applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                     applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                     applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                 }
-             }
-         }
-         String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
++        String hostAddress = jobExecutionContext.getHostName();
          try {
 -            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
 -
 +            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                  GFACSSHUtils.addSecurityContext(jobExecutionContext);
              }
          } catch (Exception e) {
@@@ -108,11 -72,9 +72,9 @@@
          DataTransferDetails detail = new DataTransferDetails();
          TransferStatus status = new TransferStatus();
  
-         ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                 .getApplicationDeploymentDescription().getType();
          Cluster cluster = null;
          try {
 -            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
 +             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
              if (cluster == null) {
                  throw new GFacProviderException("Security context is not set properly");
              } else {

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index 8e46d1b,573ddf0..ff2267c
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@@ -76,8 -78,7 +78,8 @@@ public class SSHProvider extends Abstra
  
      public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
          super.initialize(jobExecutionContext);
-         String hostAddress = jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress();
 -        if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
++        String hostAddress = jobExecutionContext.getHostName();
 +        if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
              try {
                  GFACSSHUtils.addSecurityContext(jobExecutionContext);
              } catch (ApplicationSettingsException e) {
@@@ -87,12 -88,12 +89,12 @@@
          }
          taskID = jobExecutionContext.getTaskData().getTaskID();
  
-         if (!((SSHHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getHpcResource()) {
-             jobID = "SSH_" + jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress() + "_" + Calendar.getInstance().getTimeInMillis();
+         JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+         if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+             jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
 -            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
 +            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
  
-             ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-             String remoteFile = app.getStaticWorkingDirectory() + File.separatorChar + Constants.EXECUTABLE_NAME;
+             String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
              details.setJobID(taskID);
              details.setJobDescription(remoteFile);
              jobExecutionContext.setJobDetails(details);
@@@ -140,17 -140,12 +141,14 @@@
          } else {
              try {
                  jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-                 HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                         getHostDescription().getType();
-                 HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                         getApplicationDeploymentDescription().getType();
                  JobDetails jobDetails = new JobDetails();
++                String hostAddress = jobExecutionContext.getHostName();
                  try {
                      Cluster cluster = null;
-                     if (jobExecutionContext.getSecurityContext(host.getHostAddress()) == null) {
 -                    if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) != null) {
 -                        cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
++                    if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
 +                        GFACSSHUtils.addSecurityContext(jobExecutionContext);
                      }
-                     cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(host.getHostAddress())).getPbsCluster();
++                    cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
                      if (cluster == null) {
                          throw new GFacProviderException("Security context is not set properly");
                      } else {
@@@ -201,13 -196,11 +199,12 @@@
  
      public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
          JobDetails jobDetails = jobExecutionContext.getJobDetails();
-         HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                 getHostDescription().getType();
          StringBuffer data = new StringBuffer();
++        String hostAddress = jobExecutionContext.getHostName();
          if (!hpcType) {
              throw new NotImplementedException();
          } else {
-             Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(host.getHostAddress())).getPbsCluster();
 -            Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
++            Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
              if (cluster == null) {
                  throw new GFacProviderException("Security context is not set properly");
              } else {
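
In the SSHProvider hunk above, the instanceof check on the host type is replaced by a branch on the preferred job submission protocol: plain SSH (non-HPC) jobs get a synthetic job ID built from the host name and a timestamp, while the HPC path submits through the cluster and takes its ID from the queueing system. A small standalone sketch of that branch, with a stand-in enum (not the Airavata thrift model):

    import java.util.Calendar;

    public class JobIdSketch {

        // Stand-in for the app-catalog JobSubmissionProtocol enum used above.
        enum JobSubmissionProtocol { LOCAL, SSH, GLOBUS, UNICORE }

        static String buildJobId(JobSubmissionProtocol protocol, String hostName) {
            if (protocol == JobSubmissionProtocol.SSH) {
                // A non-HPC SSH job has no resource-manager ID, so one is synthesized.
                return "SSH_" + hostName + "_" + Calendar.getInstance().getTimeInMillis();
            }
            return null; // HPC jobs receive their ID from the queueing system later.
        }

        public static void main(String[] args) {
            System.out.println(buildJobId(JobSubmissionProtocol.SSH, "example-cluster.org"));
        }
    }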

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index c756026,f726024..05cdf31
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@@ -72,77 -73,84 +73,84 @@@ public class GFACSSHUtils 
       * @throws ApplicationSettingsException
       */
      public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-         HostDescription registeredHost = jobExecutionContext.getApplicationContext().getHostDescription();
-         if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType) {
+         JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+         JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+         if (preferredJobSubmissionProtocol == JobSubmissionProtocol.GLOBUS || preferredJobSubmissionProtocol == JobSubmissionProtocol.UNICORE) {
              logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-         } else if (registeredHost.getType() instanceof SSHHostType
-                 || registeredHost.getType() instanceof GsisshHostType) {
-             SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-             String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-             RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
-             requestData.setTokenId(credentialStoreToken);
- 
-             ServerInfo serverInfo = new ServerInfo(null, registeredHost.getType().getHostAddress());
-             Cluster pbsCluster = null;
+         } else if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
              try {
-                 TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
-                 String installedParentPath = ((HpcApplicationDeploymentType)
-                         jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
-                 if (installedParentPath == null) {
-                     installedParentPath = "/";
-                 }
+                 AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+                 SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(preferredJobSubmissionInterface.getJobSubmissionInterfaceId());
+                 if (sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
+                     SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+                     String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+                     RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
+                     requestData.setTokenId(credentialStoreToken);
  
-                 SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials in to this object,data will be used
-                 serverInfo.setUserName(credentials.getPortalUserName());
-                 jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
-                 // inside the pbsCluser object
+                     ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
  
-                 String key = credentials.getPortalUserName() + registeredHost.getType().getHostAddress() +
-                         serverInfo.getPort();
-                 boolean recreate = false;
-                 synchronized (clusters) {
-                     if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                         recreate = true;
-                     } else if (clusters.containsKey(key)) {
-                         int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                         if (clusters.get(key).get(i).getSession().isConnected()) {
-                             pbsCluster = clusters.get(key).get(i);
-                         } else {
-                             clusters.get(key).remove(i);
-                             recreate = true;
+                     Cluster pbsCluster = null;
+                     try {
+                         TokenizedSSHAuthInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
+                         String installedParentPath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
+                         if (installedParentPath == null) {
+                             installedParentPath = "/";
                          }
-                         if (!recreate) {
-                             try {
-                                 pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
-                             } catch (Exception e) {
-                                 clusters.get(key).remove(i);
-                                 logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                 maxClusterCount++;
-                                 recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+ 
+                         SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();// this is just a call to get and set credentials into this object; the data will be used
+                         serverInfo.setUserName(credentials.getPortalUserName());
+                         jobExecutionContext.getExperiment().setUserName(credentials.getPortalUserName());
+                         // inside the pbsCluster object
+ 
+                         String key = credentials.getPortalUserName() + jobExecutionContext.getHostName() + serverInfo.getPort();
+                         boolean recreate = false;
+                         synchronized (clusters) {
+                             if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+                                 recreate = true;
+                             } else if (clusters.containsKey(key)) {
+                                 int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+                                 if (clusters.get(key).get(i).getSession().isConnected()) {
+                                     pbsCluster = clusters.get(key).get(i);
+                                 } else {
+                                     clusters.get(key).remove(i);
+                                     recreate = true;
+                                 }
+                                 if (!recreate) {
+                                     try {
+                                         pbsCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try to connect; if that works we are good, else we recreate
+                                     } catch (Exception e) {
+                                         clusters.get(key).remove(i);
+                                         logger.info("Connection found the connection map is expired, so we create from the scratch");
+                                         maxClusterCount++;
+                                         recreate = true; // recreate the pbsCluster if there is any exception during connection
+                                     }
+                                 }
+                                 logger.info("Re-using the same connection used with the connection string:" + key);
+                             } else {
+                                 recreate = true;
+                             }
+                             if (recreate) {
+                                 pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
+                                         CommonUtils.getPBSJobManager(installedParentPath));
+                                 List<Cluster> pbsClusters = null;
+                                 if (!(clusters.containsKey(key))) {
+                                     pbsClusters = new ArrayList<Cluster>();
+                                 } else {
+                                     pbsClusters = clusters.get(key);
+                                 }
+                                 pbsClusters.add(pbsCluster);
+                                 clusters.put(key, pbsClusters);
                              }
                          }
-                         logger.info("Re-using the same connection used with the connection string:" + key);
-                     } else {
-                         recreate = true;
-                     }
-                     if (recreate) {
-                         pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,
-                                     CommonUtils.getPBSJobManager(installedParentPath));
-                         List<Cluster> pbsClusters = null;
-                         if (!(clusters.containsKey(key))) {
-                             pbsClusters = new ArrayList<Cluster>();
-                         } else {
-                             pbsClusters = clusters.get(key);
-                         }
-                         pbsClusters.add(pbsCluster);
-                         clusters.put(key, pbsClusters);
+                     } catch (Exception e) {
+                         throw new GFacException("Error occurred...", e);
                      }
+                     sshSecurityContext.setPbsCluster(pbsCluster);
 -                    jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT, sshSecurityContext);
++                    jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), sshSecurityContext);
                  }
-             } catch (Exception e) {
-                 e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+             } catch (AppCatalogException e) {
+                 throw new GFacException("Error while getting SSH Submission object from app catalog", e);
              }
-             sshSecurityContext.setPbsCluster(pbsCluster);
-             jobExecutionContext.addSecurityContext(registeredHost.getType().getHostAddress(), sshSecurityContext);
          }
      }
  
@@@ -154,61 -162,61 +162,61 @@@
       * @throws ApplicationSettingsException
       */
      public static void addSecurityContext(JobExecutionContext jobExecutionContext,SSHAuthWrapper sshAuth) throws GFacException, ApplicationSettingsException {
-             try {
-                 if(sshAuth== null) {
-                     throw new GFacException("Error adding security Context, because sshAuthWrapper is null");
-                 }
-                 SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
-                 Cluster pbsCluster = null;
-                 String key=sshAuth.getKey();
-                 boolean recreate = false;
-                 synchronized (clusters) {
-                     if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
-                         recreate = true;
-                     } else if (clusters.containsKey(key)) {
-                         int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
-                         if (clusters.get(key).get(i).getSession().isConnected()) {
-                             pbsCluster = clusters.get(key).get(i);
-                         } else {
-                             clusters.get(key).remove(i);
-                             recreate = true;
-                         }
-                         if (!recreate) {
-                             try {
-                                 pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
-                             } catch (Exception e) {
-                                 clusters.get(key).remove(i);
-                                 logger.info("Connection found the connection map is expired, so we create from the scratch");
-                                 maxClusterCount++;
-                                 recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
-                             }
-                         }
-                         logger.info("Re-using the same connection used with the connection string:" + key);
+         try {
+             if(sshAuth== null) {
+                 throw new GFacException("Error adding security Context, because sshAuthWrapper is null");
+             }
+             SSHSecurityContext sshSecurityContext = new SSHSecurityContext();
+             Cluster pbsCluster = null;
+             String key=sshAuth.getKey();
+             boolean recreate = false;
+             synchronized (clusters) {
+                 if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
+                     recreate = true;
+                 } else if (clusters.containsKey(key)) {
+                     int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
+                     if (clusters.get(key).get(i).getSession().isConnected()) {
+                         pbsCluster = clusters.get(key).get(i);
                      } else {
+                         clusters.get(key).remove(i);
                          recreate = true;
                      }
-                     if (recreate) {
-                         pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),null);
-                         key = sshAuth.getKey();
-                         List<Cluster> pbsClusters = null;
-                         if (!(clusters.containsKey(key))) {
-                             pbsClusters = new ArrayList<Cluster>();
-                         } else {
-                             pbsClusters = clusters.get(key);
+                     if (!recreate) {
+                         try {
+                             pbsCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we try to connect; if that works we are good, else we recreate
+                         } catch (Exception e) {
+                             clusters.get(key).remove(i);
+                             logger.info("Connection found the connection map is expired, so we create from the scratch");
+                             maxClusterCount++;
+                             recreate = true; // recreate the pbsCluster if there is any exception during connection
                          }
-                         pbsClusters.add(pbsCluster);
-                         clusters.put(key, pbsClusters);
                      }
+                     logger.info("Re-using the same connection used with the connection string:" + key);
+                 } else {
+                     recreate = true;
+                 }
+                 if (recreate) {
+                     pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),null);
+                     key = sshAuth.getKey();
+                     List<Cluster> pbsClusters = null;
+                     if (!(clusters.containsKey(key))) {
+                         pbsClusters = new ArrayList<Cluster>();
+                     } else {
+                         pbsClusters = clusters.get(key);
+                     }
+                     pbsClusters.add(pbsCluster);
+                     clusters.put(key, pbsClusters);
                  }
-                 sshSecurityContext.setPbsCluster(pbsCluster);
-                 jobExecutionContext.addSecurityContext(key, sshSecurityContext);
-             } catch (Exception e) {
-                 e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
              }
+             sshSecurityContext.setPbsCluster(pbsCluster);
 -            jobExecutionContext.addSecurityContext(Constants.SSH_SECURITY_CONTEXT+key, sshSecurityContext);
++            jobExecutionContext.addSecurityContext(key, sshSecurityContext);
+         } catch (Exception e) {
+             e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+         }
      }
  
-     public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext,
-                                                     ApplicationDeploymentDescriptionType app, Cluster cluster) {
+ 
+     public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
          JobDescriptor jobDescriptor = new JobDescriptor();
          // this is common for any application descriptor
          jobDescriptor.setCallBackIp(ServerSettings.getIp());
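
The largest part of the GFACSSHUtils change is the connection cache: PBS cluster connections are pooled in a shared map keyed by user name + host + port, a pooled entry is probed with a cheap remote call (listDirectory("~/")) before it is reused, and a fresh connection is created and added to the pool when the probe fails or no entry exists. A standalone sketch of that reuse-or-recreate pattern, with a made-up Connection interface standing in for the Cluster/PBSCluster types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Random;
    import java.util.concurrent.Callable;

    public class ConnectionPoolSketch {

        // Illustrative stand-in for the SSH Cluster type.
        interface Connection {
            boolean isAlive();   // cheap liveness probe, like listDirectory("~/") above
        }

        private final Map<String, List<Connection>> pool = new HashMap<String, List<Connection>>();
        private final Random random = new Random();
        private final int maxPerKey = 5;

        synchronized Connection getOrCreate(String key, Callable<Connection> factory) throws Exception {
            List<Connection> entries = pool.get(key);
            if (entries != null && !entries.isEmpty()) {
                Connection candidate = entries.get(random.nextInt(entries.size()));
                if (candidate.isAlive()) {
                    return candidate;           // reuse the pooled connection
                }
                entries.remove(candidate);      // stale entry: drop it and rebuild below
            }
            Connection fresh = factory.call();
            if (entries == null) {
                entries = new ArrayList<Connection>();
                pool.put(key, entries);
            }
            if (entries.size() < maxPerKey) {
                entries.add(fresh);
            }
            return fresh;
        }

        public static void main(String[] args) throws Exception {
            ConnectionPoolSketch sketch = new ConnectionPoolSketch();
            Connection c = sketch.getOrCreate("user@example-cluster.org:22", new Callable<Connection>() {
                public Connection call() {
                    return new Connection() {
                        public boolean isAlive() { return true; }
                    };
                }
            });
            System.out.println("connection alive = " + c.isAlive());
        }
    }

The probe-before-reuse step is the important part: isConnected() on a cached session is not trusted, so an actual remote operation decides whether the cached entry is recycled or rebuilt.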

http://git-wip-us.apache.org/repos/asf/airavata/blob/36938926/modules/xbaya-gui/src/main/java/org/apache/airavata/xbaya/ui/experiment/WorkflowInterpreterLaunchWindow.java
----------------------------------------------------------------------


[22/50] [abbrv] airavata git commit: Integrated appCatalog model to GFac local and hpc monitor modules, commented out test classes

Posted by ch...@apache.org.
Integrated appCatalog model to GFac local and hpc monitor modules, commented out test classes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/3e584f87
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/3e584f87
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/3e584f87

Branch: refs/heads/master
Commit: 3e584f87d359c07bb2e4429884d8efa820135671
Parents: d94e8c9
Author: shamrath <sh...@gmail.com>
Authored: Tue Nov 4 17:51:53 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 13:10:24 2014 -0500

----------------------------------------------------------------------
 .../gfac/core/context/JobExecutionContext.java  |  12 +
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   4 +
 .../handler/LocalDirectorySetupHandler.java     |  19 +-
 .../gfac/local/provider/impl/LocalProvider.java |  48 ++-
 .../gfac/local/utils/LocalProviderUtil.java     |  15 +-
 .../gfac/services/impl/LocalProviderTest.java   | 368 +++++++++----------
 .../airavata/gfac/monitor/HPCMonitorID.java     |  11 +-
 .../airavata/gfac/monitor/HostMonitorData.java  |  38 +-
 .../handlers/GridPullMonitorHandler.java        |   2 +-
 .../monitor/impl/pull/qstat/HPCPullMonitor.java |  24 +-
 .../airavata/gfac/monitor/util/CommonUtils.java |  31 +-
 .../job/QstatMonitorTestWithMyProxyAuth.java    | 344 ++++++++---------
 12 files changed, 468 insertions(+), 448 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 2d1a975..30142f8 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -72,6 +72,10 @@ public class JobExecutionContext extends AbstractContext implements Serializable
 
     private String credentialStoreToken;
     /**
+     * User defined scratch/temp directory
+     */
+    private String scratchLocation;
+    /**
      * User defined working directory.
      */
     private String workingDir;
@@ -359,6 +363,14 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.credentialStoreToken = credentialStoreToken;
     }
 
+    public String getScratchLocation() {
+        return scratchLocation;
+    }
+
+    public void setScratchLocation(String scratchLocation) {
+        this.scratchLocation = scratchLocation;
+    }
+
     public String getWorkingDir() {
         return workingDir;
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 0455f7e..d063dac 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -372,6 +372,10 @@ public class BetterGfacImpl implements GFac,Watcher {
     }
 
     private void setUpWorkingLocation(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, String scratchLocation) {
+        /**
+         * Scratch location
+         */
+        jobExecutionContext.setScratchLocation(scratchLocation);
 
         /**
          * Working dir

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
index de516c0..394cfaa 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/handler/LocalDirectorySetupHandler.java
@@ -20,12 +20,9 @@
 */
 package org.apache.airavata.gfac.local.handler;
 
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,18 +34,14 @@ public class LocalDirectorySetupHandler implements GFacHandler {
 
     public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
         log.info("Invoking LocalDirectorySetupHandler ...");
-        HostDescriptionType type = jobExecutionContext.getApplicationContext().getHostDescription().getType();
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        log.debug("working directory = " + app.getStaticWorkingDirectory());
-        log.debug("temp directory = " + app.getScratchWorkingDirectory());
+        log.debug("working directory = " + jobExecutionContext.getWorkingDir());
+        log.debug("temp directory = " + jobExecutionContext.getWorkingDir());
 
-        makeFileSystemDir(app.getStaticWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getScratchWorkingDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getInputDataDirectory(),jobExecutionContext);
-        makeFileSystemDir(app.getOutputDataDirectory(),jobExecutionContext);
+        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+        makeFileSystemDir(jobExecutionContext.getInputDir());
+        makeFileSystemDir(jobExecutionContext.getOutputDir());
     }
-    private void makeFileSystemDir(String dir, JobExecutionContext jobExecutionContext) throws GFacHandlerException {
+    private void makeFileSystemDir(String dir) throws GFacHandlerException {
            File f = new File(dir);
            if (f.isDirectory() && f.exists()) {
                return;
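
With the handler reading the working, input and output directories straight off the JobExecutionContext, makeFileSystemDir only needs the path to create. A minimal sketch of a helper of that shape (assumed behaviour: return if the directory already exists, create the tree otherwise, fail loudly if creation is impossible; the exception type here is generic, not GFacHandlerException):

    import java.io.File;

    public class DirectorySetupSketch {

        static void makeFileSystemDir(String dir) {
            File f = new File(dir);
            if (f.isDirectory() && f.exists()) {
                return;                          // already present, nothing to do
            }
            if (!f.mkdirs()) {
                throw new IllegalStateException("Cannot create directory " + dir);
            }
        }

        public static void main(String[] args) {
            String base = System.getProperty("java.io.tmpdir") + File.separator + "gfac-demo";
            makeFileSystemDir(base);                              // working dir
            makeFileSystemDir(base + File.separator + "input");   // input dir
            makeFileSystemDir(base + File.separator + "output");  // output dir
        }
    }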

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
index 51da68a..4cdd0c0 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/provider/impl/LocalProvider.java
@@ -37,6 +37,8 @@ import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.core.utils.OutputUtils;
 import org.apache.airavata.gfac.local.utils.InputStreamToFileWriter;
 import org.apache.airavata.gfac.local.utils.InputUtils;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
@@ -104,18 +106,16 @@ public class LocalProvider extends AbstractProvider {
 
     public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
     	super.initialize(jobExecutionContext);
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
 
-        buildCommand(app.getExecutableLocation(), ProviderUtils.getInputParameters(jobExecutionContext));
-        initProcessBuilder(app);
+        buildCommand(jobExecutionContext.getExecutablePath(), ProviderUtils.getInputParameters(jobExecutionContext));
+        initProcessBuilder(jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription());
 
         // extra environment variables
-        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
-        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());
+        builder.environment().put(Constants.INPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getInputDir());
+        builder.environment().put(Constants.OUTPUT_DATA_DIR_VAR_NAME, jobExecutionContext.getOutputDir());
 
         // set working directory
-        builder.directory(new File(app.getStaticWorkingDirectory()));
+        builder.directory(new File(jobExecutionContext.getWorkingDir()));
 
         // log info
         log.info("Command = " + InputUtils.buildCommand(cmdList));
@@ -127,21 +127,19 @@ public class LocalProvider extends AbstractProvider {
 
     public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-         ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                 getApplicationContext().getApplicationDeploymentDescription().getType();
         JobDetails jobDetails = new JobDetails();
         try {
         	jobId = jobExecutionContext.getTaskData().getTaskID();
             jobDetails.setJobID(jobId);
-            jobDetails.setJobDescription(app.toString());
+            jobDetails.setJobDescription(jobExecutionContext.getApplicationContext()
+                    .getApplicationDeploymentDescription().getAppDeploymentDescription());
             jobExecutionContext.setJobDetails(jobDetails);
-            jobDetails.setJobDescription(app.toString());
             GFacUtils.saveJobStatus(jobExecutionContext,jobDetails, JobState.SETUP);
         	// running cmd
             Process process = builder.start();
 
-            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), app.getStandardOutput());
-            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), app.getStandardError());
+            Thread standardOutWriter = new InputStreamToFileWriter(process.getInputStream(), jobExecutionContext.getStandardOutput());
+            Thread standardErrorWriter = new InputStreamToFileWriter(process.getErrorStream(), jobExecutionContext.getStandardError());
 
             // start output threads
             standardOutWriter.setDaemon(true);
@@ -167,9 +165,10 @@ public class LocalProvider extends AbstractProvider {
 
             StringBuffer buf = new StringBuffer();
             buf.append("Executed ").append(InputUtils.buildCommand(cmdList))
-                    .append(" on the localHost, working directory = ").append(app.getStaticWorkingDirectory())
-                    .append(" tempDirectory = ").append(app.getScratchWorkingDirectory()).append(" With the status ")
+                    .append(" on the localHost, working directory = ").append(jobExecutionContext.getWorkingDir())
+                    .append(" tempDirectory = ").append(jobExecutionContext.getScratchLocation()).append(" With the status ")
                     .append(String.valueOf(returnValue));
+
             log.info(buf.toString());
 
             // updating the job status to complete because there's nothing to monitor in local jobs
@@ -219,12 +218,10 @@ public class LocalProvider extends AbstractProvider {
 //	}
 
     public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-
         try {
         	List<DataObjectType> outputArray = new ArrayList<DataObjectType>();
-            String stdOutStr = GFacUtils.readFileToString(app.getStandardOutput());
-            String stdErrStr = GFacUtils.readFileToString(app.getStandardError());
+            String stdOutStr = GFacUtils.readFileToString(jobExecutionContext.getStandardOutput());
+            String stdErrStr = GFacUtils.readFileToString(jobExecutionContext.getStandardError());
 			Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
             TaskDetails taskDetails = (TaskDetails)registry.get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
@@ -257,15 +254,14 @@ public class LocalProvider extends AbstractProvider {
         cmdList.addAll(inputParameterList);
     }
 
-    private void initProcessBuilder(ApplicationDeploymentDescriptionType app){
+    private void initProcessBuilder(ApplicationDeploymentDescription app){
         builder = new ProcessBuilder(cmdList);
 
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-
-        if(env != null && env.length > 0){
-            Map<String,String> builderEnv = builder.environment();
-            for (NameValuePairType entry : env) {
-                builderEnv.put(entry.getName(), entry.getValue());
+        List<SetEnvPaths> setEnvironment = app.getSetEnvironment();
+        if (setEnvironment != null) {
+            for (SetEnvPaths envPath : setEnvironment) {
+                Map<String,String> builderEnv = builder.environment();
+                builderEnv.put(envPath.getName(), envPath.getValue());
             }
         }
     }
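
LocalProvider now assembles everything from the JobExecutionContext: the command goes into a ProcessBuilder, the working directory and the input/output directory environment variables are set on it, the deployment's SetEnvPaths entries are copied into the environment, and stdout/stderr are drained to files on background threads before waitFor() reports the exit status. A standalone sketch of that local-execution shape, using only the JDK and assuming a Unix-like host with /bin/echo (paths and variable names are placeholders):

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import java.util.Arrays;
    import java.util.Map;

    public class LocalExecSketch {

        // Copies a process stream to a file on a background thread,
        // playing the role of InputStreamToFileWriter above.
        static Thread drain(final InputStream in, final String targetFile) {
            Thread t = new Thread(new Runnable() {
                public void run() {
                    try {
                        Files.copy(in, Paths.get(targetFile), StandardCopyOption.REPLACE_EXISTING);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            });
            t.setDaemon(true);
            return t;
        }

        public static void main(String[] args) throws Exception {
            String workDir = System.getProperty("java.io.tmpdir");

            ProcessBuilder builder = new ProcessBuilder(Arrays.asList("/bin/echo", "hello"));
            builder.directory(new File(workDir));

            // extra environment variables, like INPUT_DATA_DIR_VAR_NAME / OUTPUT_DATA_DIR_VAR_NAME
            Map<String, String> env = builder.environment();
            env.put("INPUT_DIR", workDir + File.separator + "input");
            env.put("OUTPUT_DIR", workDir + File.separator + "output");

            Process process = builder.start();
            Thread out = drain(process.getInputStream(), workDir + File.separator + "echo.stdout");
            Thread err = drain(process.getErrorStream(), workDir + File.separator + "echo.stderr");
            out.start();
            err.start();

            int returnValue = process.waitFor();
            out.join();
            err.join();
            System.out.println("Exit status: " + returnValue);
        }
    }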

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
index 932c693..2b45df7 100644
--- a/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
+++ b/modules/gfac/gfac-local/src/main/java/org/apache/airavata/gfac/local/utils/LocalProviderUtil.java
@@ -22,7 +22,6 @@ package org.apache.airavata.gfac.local.utils;
 
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,14 +40,12 @@ public class LocalProviderUtil {
     }
 
     public void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.
-                getApplicationContext().getApplicationDeploymentDescription().getType();
-        log.info("working diectroy = " + app.getStaticWorkingDirectory());
-        log.info("temp directory = " + app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getStaticWorkingDirectory());
-        makeFileSystemDir(app.getScratchWorkingDirectory());
-        makeFileSystemDir(app.getInputDataDirectory());
-        makeFileSystemDir(app.getOutputDataDirectory());
+        log.info("working directory = " + jobExecutionContext.getWorkingDir());
+        log.info("temp directory = " + jobExecutionContext.getScratchLocation());
+        makeFileSystemDir(jobExecutionContext.getWorkingDir());
+        makeFileSystemDir(jobExecutionContext.getScratchLocation());
+        makeFileSystemDir(jobExecutionContext.getInputDir());
+        makeFileSystemDir(jobExecutionContext.getOutputDir());
     }
 
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
index 343b4bf..aeb8158 100644
--- a/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
+++ b/modules/gfac/gfac-local/src/test/java/org/apache/airavata/core/gfac/services/impl/LocalProviderTest.java
@@ -1,184 +1,184 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.gfac.local.handler.LocalDirectorySetupHandler;
-import org.apache.airavata.gfac.local.provider.impl.LocalProvider;
-import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
-import org.apache.airavata.model.workspace.experiment.Experiment;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.LoggingRegistryImpl;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.apache.commons.lang.SystemUtils;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.google.common.eventbus.EventBus;
-
-public class LocalProviderTest {
-    private JobExecutionContext jobExecutionContext;
-    @BeforeTest
-    public void setUp() throws Exception {
-
-        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        File configFile = new File(resource.getPath());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null);
-        //have to set InFlwo Handlers and outFlowHandlers
-        ApplicationContext applicationContext = new ApplicationContext();
-        HostDescription host = new HostDescription();
-        host.getType().setHostName("localhost");
-        host.getType().setHostAddress("localhost");
-        applicationContext.setHostDescription(host);
-        /*
-           * App
-           */
-        ApplicationDescription appDesc = new ApplicationDescription();
-        ApplicationDeploymentDescriptionType app = appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-
-        /*
-           * Use bat file if it is compiled on Windows
-           */
-        if (SystemUtils.IS_OS_WINDOWS) {
-            URL url = this.getClass().getClassLoader().getResource("echo.bat");
-            app.setExecutableLocation(url.getFile());
-        } else {
-            //for unix and Mac
-            app.setExecutableLocation("/bin/echo");
-        }
-
-        /*
-           * Default tmp location
-           */
-        String tempDir = System.getProperty("java.io.tmpdir");
-        if (tempDir == null) {
-            tempDir = "/tmp";
-        }
-
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "input");
-        app.setOutputDataDirectory(tempDir + File.separator + "output");
-        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
-        app.setStandardError(tempDir + File.separator + "echo.stderr");
-
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-
-        /*
-           * Service
-           */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-                .size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        jobExecutionContext.setApplicationContext(applicationContext);
-        /*
-        * Host
-        */
-        applicationContext.setServiceDescription(serv);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-        outMessage.addParameter("echo_output", echo_out);
-
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-        jobExecutionContext.setExperimentID("test123");
-        jobExecutionContext.setExperiment(new Experiment("test123","project1","admin","testExp"));
-        jobExecutionContext.setTaskData(new TaskDetails(jobExecutionContext.getExperimentID()));
-        jobExecutionContext.setRegistry(new LoggingRegistryImpl());
-        jobExecutionContext.setWorkflowNodeDetails(new WorkflowNodeDetails(jobExecutionContext.getExperimentID(),"none", ExecutionUnit.APPLICATION));
-
-
-    }
-
-    @Test
-    public void testLocalDirectorySetupHandler() throws GFacException {
-        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
-        localDirectorySetupHandler.invoke(jobExecutionContext);
-
-        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
-        junit.framework.Assert.assertTrue(new File(app.getStaticWorkingDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getScratchWorkingDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getInputDataDirectory()).exists());
-        junit.framework.Assert.assertTrue(new File(app.getOutputDataDirectory()).exists());
-    }
-
-    @Test
-    public void testLocalProvider() throws GFacException,GFacProviderException {
-        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
-        localDirectorySetupHandler.invoke(jobExecutionContext);
-        LocalProvider localProvider = new LocalProvider();
-        localProvider.setMonitorPublisher(new MonitorPublisher(new EventBus()));
-        localProvider.initialize(jobExecutionContext);
-        localProvider.execute(jobExecutionContext);
-        localProvider.dispose(jobExecutionContext);
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+//import org.apache.airavata.common.utils.MonitorPublisher;
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.provider.GFacProviderException;
+//import org.apache.airavata.gfac.local.handler.LocalDirectorySetupHandler;
+//import org.apache.airavata.gfac.local.provider.impl.LocalProvider;
+//import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
+//import org.apache.airavata.model.workspace.experiment.Experiment;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.LoggingRegistryImpl;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.apache.commons.lang.SystemUtils;
+//import org.testng.annotations.BeforeTest;
+//import org.testng.annotations.Test;
+//
+//import com.google.common.eventbus.EventBus;
+//
+//public class LocalProviderTest {
+//    private JobExecutionContext jobExecutionContext;
+//    @BeforeTest
+//    public void setUp() throws Exception {
+//
+//        URL resource = this.getClass().getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        File configFile = new File(resource.getPath());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(configFile, null);
+//        //have to set InFlwo Handlers and outFlowHandlers
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        HostDescription host = new HostDescription();
+//        host.getType().setHostName("localhost");
+//        host.getType().setHostAddress("localhost");
+//        applicationContext.setHostDescription(host);
+//        /*
+//           * App
+//           */
+//        ApplicationDescription appDesc = new ApplicationDescription();
+//        ApplicationDeploymentDescriptionType app = appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//
+//        /*
+//           * Use bat file if it is compiled on Windows
+//           */
+//        if (SystemUtils.IS_OS_WINDOWS) {
+//            URL url = this.getClass().getClassLoader().getResource("echo.bat");
+//            app.setExecutableLocation(url.getFile());
+//        } else {
+//            //for unix and Mac
+//            app.setExecutableLocation("/bin/echo");
+//        }
+//
+//        /*
+//           * Default tmp location
+//           */
+//        String tempDir = System.getProperty("java.io.tmpdir");
+//        if (tempDir == null) {
+//            tempDir = "/tmp";
+//        }
+//
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "input");
+//        app.setOutputDataDirectory(tempDir + File.separator + "output");
+//        app.setStandardOutput(tempDir + File.separator + "echo.stdout");
+//        app.setStandardError(tempDir + File.separator + "echo.stderr");
+//
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//
+//        /*
+//           * Service
+//           */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//                .size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        /*
+//        * Host
+//        */
+//        applicationContext.setServiceDescription(serv);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+//        outMessage.addParameter("echo_output", echo_out);
+//
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//        jobExecutionContext.setExperimentID("test123");
+//        jobExecutionContext.setExperiment(new Experiment("test123","project1","admin","testExp"));
+//        jobExecutionContext.setTaskData(new TaskDetails(jobExecutionContext.getExperimentID()));
+//        jobExecutionContext.setRegistry(new LoggingRegistryImpl());
+//        jobExecutionContext.setWorkflowNodeDetails(new WorkflowNodeDetails(jobExecutionContext.getExperimentID(),"none", ExecutionUnit.APPLICATION));
+//
+//
+//    }
+//
+//    @Test
+//    public void testLocalDirectorySetupHandler() throws GFacException {
+//        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
+//        localDirectorySetupHandler.invoke(jobExecutionContext);
+//
+//        ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
+//        ApplicationDeploymentDescriptionType app = applicationDeploymentDescription.getType();
+//        junit.framework.Assert.assertTrue(new File(app.getStaticWorkingDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getScratchWorkingDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getInputDataDirectory()).exists());
+//        junit.framework.Assert.assertTrue(new File(app.getOutputDataDirectory()).exists());
+//    }
+//
+//    @Test
+//    public void testLocalProvider() throws GFacException,GFacProviderException {
+//        LocalDirectorySetupHandler localDirectorySetupHandler = new LocalDirectorySetupHandler();
+//        localDirectorySetupHandler.invoke(jobExecutionContext);
+//        LocalProvider localProvider = new LocalProvider();
+//        localProvider.setMonitorPublisher(new MonitorPublisher(new EventBus()));
+//        localProvider.initialize(jobExecutionContext);
+//        localProvider.execute(jobExecutionContext);
+//        localProvider.dispose(jobExecutionContext);
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
index a4a131d..c788ace 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
@@ -31,6 +31,7 @@ import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
 import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -45,10 +46,10 @@ public class HPCMonitorID extends MonitorID {
 
     private AuthenticationInfo authenticationInfo = null;
 
-    public HPCMonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID,
+    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID,
                         String experimentID, String userName,String jobName) {
-        super(host, jobID, taskID, workflowNodeID, experimentID, userName,jobName);
-        setHost(host);
+        super(computeResourceDescription, jobID, taskID, workflowNodeID, experimentID, userName,jobName);
+        setComputeResourceDescription(computeResourceDescription);
         setJobStartedTime(new Timestamp((new Date()).getTime()));
         setUserName(userName);
         setJobID(jobID);
@@ -84,8 +85,8 @@ public class HPCMonitorID extends MonitorID {
         }
     }
 
-    public HPCMonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID, String experimentID, String userName, AuthenticationInfo authenticationInfo) {
-        setHost(host);
+    public HPCMonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID, String experimentID, String userName, AuthenticationInfo authenticationInfo) {
+        setComputeResourceDescription(computeResourceDescription);
         setJobStartedTime(new Timestamp((new Date()).getTime()));
         this.authenticationInfo = authenticationInfo;
         // if we give myproxyauthenticationInfo, so we try to use myproxy user as the user
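
The hunk above switches HPCMonitorID from the XML-beans HostDescription to the app-catalog ComputeResourceDescription. A minimal sketch of the new constructor call, assuming the Thrift-generated no-arg constructor and setters on ComputeResourceDescription; the resource id, job id and the other string values below are hypothetical:

    import org.apache.airavata.gfac.monitor.HPCMonitorID;
    import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;

    public class HPCMonitorIDSketch {
        public static void main(String[] args) {
            // Describe the target cluster with the app-catalog model (values are hypothetical).
            ComputeResourceDescription resource = new ComputeResourceDescription();
            resource.setComputeResourceId("trestles.sdsc.edu_c2e9b0a6");
            resource.setHostName("trestles.sdsc.edu");

            // New signature: the compute resource replaces the old HostDescription argument.
            HPCMonitorID monitorID = new HPCMonitorID(resource, "12345.trestles-fe1", // hypothetical job id
                    "taskId", "workflowNodeId", "experimentId", "ogce", "echoJob");
        }
    }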

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
index 0480925..c2017a0 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
@@ -20,34 +20,36 @@
 */
 package org.apache.airavata.gfac.monitor;
 
-import org.apache.airavata.commons.gfac.type.HostDescription;
+import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 
-import java.util.ArrayList;
 import java.util.List;
 
 public class HostMonitorData {
-    private HostDescription host;
+//    private HostDescription host;
+    private ComputeResourceDescription computeResourceDescription;
+    private JobSubmissionProtocol jobSubmissionProtocol;
+    private DataMovementProtocol dataMovementProtocol;
 
     private List<MonitorID> monitorIDs;
 
-    public HostMonitorData(HostDescription host) {
-        this.host = host;
-        monitorIDs = new ArrayList<MonitorID>();
-    }
+    public HostMonitorData(JobExecutionContext jobExecutionContext) {
+        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
+        this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
 
-    public HostMonitorData(HostDescription host, List<MonitorID> monitorIDs) {
-        this.host = host;
-        this.monitorIDs = monitorIDs;
     }
 
-    public HostDescription getHost() {
-        return host;
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
     }
 
-    public void setHost(HostDescription host) {
-        this.host = host;
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
     }
 
     public List<MonitorID> getMonitorIDs() {
@@ -67,4 +69,12 @@ public class HostMonitorData {
     public void addMonitorIDForHost(MonitorID monitorID)throws AiravataMonitorException {
         monitorIDs.add(monitorID);
     }
+
+    public JobSubmissionProtocol getJobSubmissionProtocol() {
+        return jobSubmissionProtocol;
+    }
+
+    public DataMovementProtocol getDataMovementProtocol() {
+        return dataMovementProtocol;
+    }
 }
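
HostMonitorData is now derived from the JobExecutionContext rather than a HostDescription, and it carries the preferred job submission and data movement protocols alongside the compute resource. A minimal sketch of how a consumer reads it, assuming an already-populated JobExecutionContext from the GFac invocation; note that, unlike the removed constructors, the new one shown above does not allocate the monitorIDs list, so addMonitorIDForHost is left out of the sketch:

    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gfac.monitor.HostMonitorData;
    import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
    import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

    public class HostMonitorDataSketch {
        // jobExecutionContext is assumed to be the context of the job being monitored.
        static void describe(JobExecutionContext jobExecutionContext) {
            HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
            // Everything the monitor needs now comes from the context-derived fields.
            ComputeResourceDescription resource = hostMonitorData.getComputeResourceDescription();
            JobSubmissionProtocol submission = hostMonitorData.getJobSubmissionProtocol();
            DataMovementProtocol movement = hostMonitorData.getDataMovementProtocol();
            System.out.println(resource.getHostName() + " -> " + submission + " / " + movement);
        }
    }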

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
index ceb440c..3a0e44d 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/handlers/GridPullMonitorHandler.java
@@ -99,7 +99,7 @@ public class GridPullMonitorHandler extends ThreadedHandler implements Watcher{
             } catch (InterruptedException e) {
                 e.printStackTrace();
             }
-            CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID);
+            CommonUtils.addMonitortoQueue(hpcPullMonitor.getQueue(), monitorID, jobExecutionContext);
             CommonUtils.increaseZkJobCount(monitorID); // update change job count to zookeeper
         } catch (AiravataMonitorException e) {
             logger.errorId(monitorID.getJobID(), "Error adding job {} monitorID object to the queue with experiment {}",

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 952b30e..122d1e2 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@ -38,6 +38,7 @@ import org.apache.airavata.gfac.monitor.impl.push.amqp.SimpleJobFinishConsumer;
 import org.apache.airavata.gfac.monitor.util.CommonUtils;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.authentication.AuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -159,20 +160,19 @@ public class HPCPullMonitor extends PullMonitor {
             take = this.queue.take();
             List<HostMonitorData> hostMonitorData = take.getHostMonitorData();
             for (HostMonitorData iHostMonitorData : hostMonitorData) {
-                if (iHostMonitorData.getHost().getType() instanceof GsisshHostType
-                        || iHostMonitorData.getHost().getType() instanceof SSHHostType) {
-                    String hostName =  iHostMonitorData.getHost().getType().getHostAddress();
+                if (iHostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
+                    String hostName = iHostMonitorData.getComputeResourceDescription().getHostName();
                     ResourceConnection connection = null;
                     if (connections.containsKey(hostName)) {
-                        if(!connections.get(hostName).isConnected()){
-                            connection = new ResourceConnection(iHostMonitorData,getAuthenticationInfo());
+                        if (!connections.get(hostName).isConnected()) {
+                            connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
                             connections.put(hostName, connection);
-                        }else{
+                        } else {
                             logger.debug("We already have this connection so not going to create one");
                             connection = connections.get(hostName);
                         }
                     } else {
-                        connection = new ResourceConnection(iHostMonitorData,getAuthenticationInfo());
+                        connection = new ResourceConnection(iHostMonitorData, getAuthenticationInfo());
                         connections.put(hostName, connection);
                     }
 
@@ -207,7 +207,7 @@ public class HPCPullMonitor extends PullMonitor {
                             MonitorID iMonitorID = monitorIDListIterator.next();
                             String completeId = null;
                             while (iterator.hasNext()) {
-                                 completeId = iterator.next();
+                                completeId = iterator.next();
                                 if (completeId.equals(iMonitorID.getUserName() + "," + iMonitorID.getJobName())) {
                                     logger.info("This job is finished because push notification came with <username,jobName> " + completeId);
                                     iMonitorID.setStatus(JobState.COMPLETE);
@@ -239,6 +239,7 @@ public class HPCPullMonitor extends PullMonitor {
                                 !JobState.COMPLETE.equals(iMonitorID.getStatus())) {
                             iMonitorID.setStatus(jobStatuses.get(iMonitorID.getJobID() + "," + iMonitorID.getJobName()));    //IMPORTANT this is NOT a simple setter we have a logic
                         }else if(JobState.COMPLETE.equals(iMonitorID.getStatus())){
+                            completedJobs.put(iMonitorID.getJobName(), iMonitorID);
                             logger.debugId(iMonitorID.getJobID(), "Moved job {} to completed jobs map, experiment {}, " +
                                     "task {}", iMonitorID.getJobID(), iMonitorID.getExperimentID(), iMonitorID.getTaskID());
                             iterator.remove();
@@ -260,8 +261,7 @@ public class HPCPullMonitor extends PullMonitor {
                         MonitorID iMonitorID = iterator.next();
                         if (iMonitorID.getFailedCount() > FAILED_COUNT) {
                             iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                            String outputDir = iMonitorID.getJobExecutionContext().getApplicationContext()
-                                    .getApplicationDeploymentDescription().getType().getOutputDataDirectory();
+                            String outputDir = iMonitorID.getJobExecutionContext().getOutputDir();
                             List<String> stdOut = null;
                             try {
                                 stdOut = connection.getCluster().listDirectory(outputDir); // check the outputs directory
@@ -296,8 +296,8 @@ public class HPCPullMonitor extends PullMonitor {
 
 
                 } else {
-                    logger.debug("Qstat Monitor doesn't handle non-gsissh hosts , host {}", iHostMonitorData.getHost()
-                            .getType().getHostAddress());
+                    logger.debug("Qstat Monitor doesn't handle non-gsissh hosts , host {}", iHostMonitorData.
+                            getComputeResourceDescription().getHostName());
                 }
             }
             // We have finished all the HostMonitorData object in userMonitorData, now we need to put it back
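
With the host schema types gone, HPCPullMonitor decides whether it can monitor a host by looking at the preferred job submission protocol, and it keys its connection cache on the compute resource host name. A minimal sketch of that check, using only the accessors introduced in the hunks above:

    import org.apache.airavata.gfac.monitor.HostMonitorData;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

    public class PullMonitorDispatchSketch {
        // Mirrors the branch above: only SSH-submitted jobs are qstat-monitored.
        static boolean isMonitorable(HostMonitorData hostMonitorData) {
            return hostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH;
        }

        // Key used for the ResourceConnection cache in the pull monitor.
        static String connectionKey(HostMonitorData hostMonitorData) {
            return hostMonitorData.getComputeResourceDescription().getHostName();
        }
    }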

http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
index 3abcf1d..a503154 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
@@ -34,6 +34,7 @@ import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.HostMonitorData;
 import org.apache.airavata.gfac.monitor.UserMonitorData;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
@@ -79,11 +80,11 @@ public class CommonUtils {
         }
     }
     public static String getChannelID(MonitorID monitorID) {
-        return monitorID.getUserName() + "-" + monitorID.getHost().getType().getHostName();
+        return monitorID.getUserName() + "-" + monitorID.getComputeResourceDescription().getHostName();
     }
 
     public static String getRoutingKey(MonitorID monitorID) {
-        return "*." + monitorID.getUserName() + "." + monitorID.getHost().getType().getHostAddress();
+        return "*." + monitorID.getUserName() + "." + monitorID.getComputeResourceDescription().getIpAddresses().get(0);
     }
 
     public static String getChannelID(String userName,String hostAddress) {
@@ -94,7 +95,7 @@ public class CommonUtils {
         return "*." + userName + "." + hostAddress;
     }
 
-    public static void addMonitortoQueue(BlockingQueue<UserMonitorData> queue, MonitorID monitorID) throws AiravataMonitorException {
+    public static void addMonitortoQueue(BlockingQueue<UserMonitorData> queue, MonitorID monitorID, JobExecutionContext jobExecutionContext) throws AiravataMonitorException {
         synchronized (queue) {
             Iterator<UserMonitorData> iterator = queue.iterator();
             while (iterator.hasNext()) {
@@ -103,7 +104,7 @@ public class CommonUtils {
                     // then this is the right place to update
                     List<HostMonitorData> monitorIDs = next.getHostMonitorData();
                     for (HostMonitorData host : monitorIDs) {
-                        if (host.getHost().toXML().equals(monitorID.getHost().toXML())) {
+                        if (isEqual(host.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
                             // ok we found right place to add this monitorID
                             host.addMonitorIDForHost(monitorID);
                             logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
@@ -113,7 +114,7 @@ public class CommonUtils {
                     }
                     // there is a userMonitor object for this user name but no Hosts for this host
                     // so we have to create new Hosts
-                    HostMonitorData hostMonitorData = new HostMonitorData(monitorID.getHost());
+                    HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
                     hostMonitorData.addMonitorIDForHost(monitorID);
                     next.addHostMonitorData(hostMonitorData);
                     logger.debugId(monitorID.getJobID(), "Added new job to the monitoring queue, experiment {}," +
@@ -121,7 +122,7 @@ public class CommonUtils {
                     return;
                 }
             }
-            HostMonitorData hostMonitorData = new HostMonitorData(monitorID.getHost());
+            HostMonitorData hostMonitorData = new HostMonitorData(jobExecutionContext);
             hostMonitorData.addMonitorIDForHost(monitorID);
 
             UserMonitorData userMonitorData = new UserMonitorData(monitorID.getUserName());
@@ -135,11 +136,18 @@ public class CommonUtils {
             }
         }
     }
+
+    private static boolean isEqual(ComputeResourceDescription comRes_1, ComputeResourceDescription comRes_2) {
+        return comRes_1.getComputeResourceId().equals(comRes_2.getComputeResourceId()) &&
+                comRes_1.getHostName().equals(comRes_2.getHostName());
+    }
+
     public static boolean isTheLastJobInQueue(BlockingQueue<MonitorID> queue,MonitorID monitorID){
         Iterator<MonitorID> iterator = queue.iterator();
         while(iterator.hasNext()){
             MonitorID next = iterator.next();
-            if(monitorID.getUserName().equals(next.getUserName()) && CommonUtils.isEqual(monitorID.getHost(), next.getHost())){
+            if (monitorID.getUserName().equals(next.getUserName()) &&
+                    CommonUtils.isEqual(monitorID.getComputeResourceDescription(), next.getComputeResourceDescription())) {
                 return false;
             }
         }
@@ -162,7 +170,7 @@ public class CommonUtils {
                     Iterator<HostMonitorData> iterator1 = hostMonitorData.iterator();
                     while (iterator1.hasNext()) {
                         HostMonitorData iHostMonitorID = iterator1.next();
-                        if (iHostMonitorID.getHost().toXML().equals(monitorID.getHost().toXML())) {
+                        if (isEqual(iHostMonitorID.getComputeResourceDescription(), monitorID.getComputeResourceDescription())) {
                             Iterator<MonitorID> iterator2 = iHostMonitorID.getMonitorIDs().iterator();
                             while (iterator2.hasNext()) {
                                 MonitorID iMonitorID = iterator2.next();
@@ -172,11 +180,10 @@ public class CommonUtils {
                                     // could be different, thats why we check the jobID
                                     iterator2.remove();
                                     logger.infoId(monitorID.getJobID(), "Removed the jobId: {} JobName: {} from monitoring last " +
-                                            "status:{}", monitorID.getJobID(),monitorID.getJobName(), monitorID.getStatus().toString());
+                                            "status:{}", monitorID.getJobID(), monitorID.getJobName(), monitorID.getStatus().toString());
                                     if (iHostMonitorID.getMonitorIDs().size() == 0) {
                                         iterator1.remove();
-                                        logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getHost()
-                                                .getType().getHostAddress());
+                                        logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getComputeResourceDescription().getHostName());
                                         if (hostMonitorData.size() == 0) {
                                             // no useful data so we have to remove the element from the queue
                                             queue.remove(next);
@@ -330,7 +337,7 @@ public class CommonUtils {
      */
     public static String getJobCountUpdatePath(MonitorID monitorID){
         return new StringBuilder("/").append(Constants.STAT).append("/").append(monitorID.getUserName())
-                .append("/").append(monitorID.getHost().getType().getHostAddress()).append("/").append(Constants.JOB).toString();
+                .append("/").append(monitorID.getComputeResourceDescription().getHostName()).append("/").append(Constants.JOB).toString();
     }
 
     /**

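CommonUtils.addMonitortoQueue now takes the JobExecutionContext so that it can build a HostMonitorData (and hence the protocols) when the user has no entry for that compute resource yet, and hosts are matched with the new isEqual check on compute resource id and host name instead of comparing HostDescription XML. A minimal enqueue sketch, assuming monitorID and jobExecutionContext describe the same freshly submitted job:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gfac.core.monitor.MonitorID;
    import org.apache.airavata.gfac.monitor.UserMonitorData;
    import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
    import org.apache.airavata.gfac.monitor.util.CommonUtils;

    public class MonitorQueueSketch {
        static void enqueue(MonitorID monitorID, JobExecutionContext jobExecutionContext)
                throws AiravataMonitorException {
            BlockingQueue<UserMonitorData> pullQueue = new LinkedBlockingQueue<UserMonitorData>();
            // The extra context argument is what HostMonitorData is built from when no
            // matching host entry exists in the queue for this user.
            CommonUtils.addMonitortoQueue(pullQueue, monitorID, jobExecutionContext);
        }
    }
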
http://git-wip-us.apache.org/repos/asf/airavata/blob/3e584f87/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
index 537d8bb..610934e 100644
--- a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
@@ -1,172 +1,172 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.job;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.monitor.HPCMonitorID;
-import org.apache.airavata.gfac.monitor.UserMonitorData;
-import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.junit.Assert;
-import org.testng.annotations.Test;
-
-import com.google.common.eventbus.EventBus;
-import com.google.common.eventbus.Subscribe;
-
-public class QstatMonitorTestWithMyProxyAuth {
-    private String myProxyUserName;
-    private String myProxyPassword;
-    private String certificateLocation;
-    private String pbsFilePath;
-    private String workingDirectory;
-    private HostDescription hostDescription;
-    private MonitorPublisher monitorPublisher;
-    private BlockingQueue<UserMonitorData> pullQueue;
-    private Thread monitorThread;
-
-    @org.testng.annotations.BeforeClass
-    public void setUp() throws Exception {
-//        System.setProperty("myproxy.username", "ogce");
-//        System.setProperty("myproxy.password", "");
-//        System.setProperty("basedir", "/Users/lahirugunathilake/work/airavata/sandbox/gsissh");
-//        System.setProperty("gsi.working.directory", "/home/ogce");
-//        System.setProperty("trusted.cert.location", "/Users/lahirugunathilake/Downloads/certificates");
-        myProxyUserName = System.getProperty("myproxy.username");
-        myProxyPassword = System.getProperty("myproxy.password");
-        workingDirectory = System.getProperty("gsi.working.directory");
-        certificateLocation = System.getProperty("trusted.cert.location");
-        if (myProxyUserName == null || myProxyPassword == null || workingDirectory == null) {
-            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
-                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
-            throw new Exception("Need my proxy user name password to run tests.");
-        }
-
-        monitorPublisher =  new MonitorPublisher(new EventBus());
-        class InnerClassQstat {
-
-            @Subscribe
-            private void getStatus(JobStatusChangeEvent status) {
-                Assert.assertNotNull(status);
-                System.out.println(status.getState().toString());
-                monitorThread.interrupt();
-            }
-        }
-        monitorPublisher.registerListener(this);
-        pullQueue = new LinkedBlockingQueue<UserMonitorData>();
-        final HPCPullMonitor qstatMonitor = new
-                HPCPullMonitor(pullQueue, monitorPublisher);
-        try {
-            (new Thread(){
-                public void run(){
-                    qstatMonitor.run();
-                }
-            }).start();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        hostDescription = new HostDescription(GsisshHostType.type);
-        hostDescription.getType().setHostAddress("trestles.sdsc.edu");
-        hostDescription.getType().setHostName("gsissh-gordon");
-        ((GsisshHostType) hostDescription.getType()).setPort(22);
-        ((GsisshHostType)hostDescription.getType()).setInstalledPath("/opt/torque/bin/");
-    }
-
-    @Test
-    public void testQstatMonitor() throws SSHApiException {
-        /* now have to submit a job to some machine and add that job to the queue */
-        //Create authentication
-        GSIAuthenticationInfo authenticationInfo
-                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
-                7512, 17280000, certificateLocation);
-
-        // Server info
-        ServerInfo serverInfo = new ServerInfo("ogce", hostDescription.getType().getHostAddress());
-
-
-        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
-
-
-        // Execute command
-        System.out.println("Target PBS file path: " + workingDirectory);
-        // constructing the job object
-        JobDescriptor jobDescriptor = new JobDescriptor();
-        jobDescriptor.setWorkingDirectory(workingDirectory);
-        jobDescriptor.setShellName("/bin/bash");
-        jobDescriptor.setJobName("GSI_SSH_SLEEP_JOB");
-        jobDescriptor.setExecutablePath("/bin/echo");
-        jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setMailOptions("n");
-        jobDescriptor.setStandardOutFile(workingDirectory + File.separator + "application.out");
-        jobDescriptor.setStandardErrorFile(workingDirectory + File.separator + "application.err");
-        jobDescriptor.setNodes(1);
-        jobDescriptor.setProcessesPerNode(1);
-        jobDescriptor.setQueueName("normal");
-        jobDescriptor.setMaxWallTime("60");
-        jobDescriptor.setAcountString("sds128");
-        List<String> inputs = new ArrayList<String>();
-        jobDescriptor.setOwner("ogce");
-        inputs.add("Hello World");
-        jobDescriptor.setInputValues(inputs);
-        //finished construction of job object
-        System.out.println(jobDescriptor.toXML());
-        for (int i = 0; i < 1; i++) {
-            String jobID = pbsCluster.submitBatchJob(jobDescriptor);
-            System.out.println("Job submitted successfully, Job ID: " +  jobID);
-            MonitorID monitorID = new HPCMonitorID(hostDescription, jobID,null,null,null, "ogce","");
-            ((HPCMonitorID)monitorID).setAuthenticationInfo(authenticationInfo);
-            try {
-                org.apache.airavata.gfac.monitor.util.CommonUtils.addMonitortoQueue(pullQueue, monitorID);
-            } catch (Exception e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-            }
-        }
-        try {
-
-            monitorThread.join();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Subscribe
-    public void testCaseShutDown(JobStatusChangeEvent status) {
-        Assert.assertNotNull(status.getState());
-        monitorThread.stop();
-    }
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.job;
+//
+//import java.io.File;
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.concurrent.BlockingQueue;
+//import java.util.concurrent.LinkedBlockingQueue;
+//
+//import org.apache.airavata.common.utils.MonitorPublisher;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.gfac.core.monitor.MonitorID;
+//import org.apache.airavata.gfac.monitor.HPCMonitorID;
+//import org.apache.airavata.gfac.monitor.UserMonitorData;
+//import org.apache.airavata.gfac.monitor.impl.pull.qstat.HPCPullMonitor;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.junit.Assert;
+//import org.testng.annotations.Test;
+//
+//import com.google.common.eventbus.EventBus;
+//import com.google.common.eventbus.Subscribe;
+//
+//public class QstatMonitorTestWithMyProxyAuth {
+//    private String myProxyUserName;
+//    private String myProxyPassword;
+//    private String certificateLocation;
+//    private String pbsFilePath;
+//    private String workingDirectory;
+//    private HostDescription hostDescription;
+//    private MonitorPublisher monitorPublisher;
+//    private BlockingQueue<UserMonitorData> pullQueue;
+//    private Thread monitorThread;
+//
+//    @org.testng.annotations.BeforeClass
+//    public void setUp() throws Exception {
+////        System.setProperty("myproxy.username", "ogce");
+////        System.setProperty("myproxy.password", "");
+////        System.setProperty("basedir", "/Users/lahirugunathilake/work/airavata/sandbox/gsissh");
+////        System.setProperty("gsi.working.directory", "/home/ogce");
+////        System.setProperty("trusted.cert.location", "/Users/lahirugunathilake/Downloads/certificates");
+//        myProxyUserName = System.getProperty("myproxy.username");
+//        myProxyPassword = System.getProperty("myproxy.password");
+//        workingDirectory = System.getProperty("gsi.working.directory");
+//        certificateLocation = System.getProperty("trusted.cert.location");
+//        if (myProxyUserName == null || myProxyPassword == null || workingDirectory == null) {
+//            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
+//                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
+//            throw new Exception("Need my proxy user name password to run tests.");
+//        }
+//
+//        monitorPublisher =  new MonitorPublisher(new EventBus());
+//        class InnerClassQstat {
+//
+//            @Subscribe
+//            private void getStatus(JobStatusChangeEvent status) {
+//                Assert.assertNotNull(status);
+//                System.out.println(status.getState().toString());
+//                monitorThread.interrupt();
+//            }
+//        }
+//        monitorPublisher.registerListener(this);
+//        pullQueue = new LinkedBlockingQueue<UserMonitorData>();
+//        final HPCPullMonitor qstatMonitor = new
+//                HPCPullMonitor(pullQueue, monitorPublisher);
+//        try {
+//            (new Thread(){
+//                public void run(){
+//                    qstatMonitor.run();
+//                }
+//            }).start();
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//
+//        hostDescription = new HostDescription(GsisshHostType.type);
+//        hostDescription.getType().setHostAddress("trestles.sdsc.edu");
+//        hostDescription.getType().setHostName("gsissh-gordon");
+//        ((GsisshHostType) hostDescription.getType()).setPort(22);
+//        ((GsisshHostType)hostDescription.getType()).setInstalledPath("/opt/torque/bin/");
+//    }
+//
+//    @Test
+//    public void testQstatMonitor() throws SSHApiException {
+//        /* now have to submit a job to some machine and add that job to the queue */
+//        //Create authentication
+//        GSIAuthenticationInfo authenticationInfo
+//                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
+//                7512, 17280000, certificateLocation);
+//
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo("ogce", hostDescription.getType().getHostAddress());
+//
+//
+//        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
+//
+//
+//        // Execute command
+//        System.out.println("Target PBS file path: " + workingDirectory);
+//        // constructing the job object
+//        JobDescriptor jobDescriptor = new JobDescriptor();
+//        jobDescriptor.setWorkingDirectory(workingDirectory);
+//        jobDescriptor.setShellName("/bin/bash");
+//        jobDescriptor.setJobName("GSI_SSH_SLEEP_JOB");
+//        jobDescriptor.setExecutablePath("/bin/echo");
+//        jobDescriptor.setAllEnvExport(true);
+//        jobDescriptor.setMailOptions("n");
+//        jobDescriptor.setStandardOutFile(workingDirectory + File.separator + "application.out");
+//        jobDescriptor.setStandardErrorFile(workingDirectory + File.separator + "application.err");
+//        jobDescriptor.setNodes(1);
+//        jobDescriptor.setProcessesPerNode(1);
+//        jobDescriptor.setQueueName("normal");
+//        jobDescriptor.setMaxWallTime("60");
+//        jobDescriptor.setAcountString("sds128");
+//        List<String> inputs = new ArrayList<String>();
+//        jobDescriptor.setOwner("ogce");
+//        inputs.add("Hello World");
+//        jobDescriptor.setInputValues(inputs);
+//        //finished construction of job object
+//        System.out.println(jobDescriptor.toXML());
+//        for (int i = 0; i < 1; i++) {
+//            String jobID = pbsCluster.submitBatchJob(jobDescriptor);
+//            System.out.println("Job submitted successfully, Job ID: " +  jobID);
+//            MonitorID monitorID = new HPCMonitorID(hostDescription, jobID,null,null,null, "ogce","");
+//            ((HPCMonitorID)monitorID).setAuthenticationInfo(authenticationInfo);
+//            try {
+//                org.apache.airavata.gfac.monitor.util.CommonUtils.addMonitortoQueue(pullQueue, monitorID, jobExecutionContext);
+//            } catch (Exception e) {
+//                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//            }
+//        }
+//        try {
+//
+//            monitorThread.join();
+//        } catch (Exception e) {
+//            e.printStackTrace();
+//        }
+//    }
+//
+//    @Subscribe
+//    public void testCaseShutDown(JobStatusChangeEvent status) {
+//        Assert.assertNotNull(status.getState());
+//        monitorThread.stop();
+//    }
+//}


[49/50] [abbrv] airavata git commit: merge changes of master - AIRAVATA-1511

Posted by ch...@apache.org.
merge changes of master - AIRAVATA-1511


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/65ad5860
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/65ad5860
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/65ad5860

Branch: refs/heads/master
Commit: 65ad58606fdd4d4a8a1aa5c11a0bc7bdc1f6ac9c
Parents: 3693892 f7de359
Author: Chathuri Wimalasena <ka...@gmail.com>
Authored: Tue Nov 11 11:31:18 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Tue Nov 11 11:31:18 2014 -0500

----------------------------------------------------------------------
 .../client/samples/CreateLaunchExperiment.java  |  21 +-
 .../tools/RegisterSampleApplications.java       |   9 +-
 .../resources/schemas/GFacParameterTypes.xsd    |   2 +-
 .../java/src/main/assembly/bin-assembly.xml     | 280 ++++++++++---------
 .../server/src/main/assembly/bin-assembly.xml   |  25 +-
 .../gfac/bes/provider/impl/BESProvider.java     | 206 ++++++++++++++
 .../bes/security/UNICORESecurityContext.java    |   6 +-
 .../gfac/bes/utils/DataTransferrer.java         |  53 +++-
 .../airavata/gfac/bes/utils/JSDLUtils.java      |   6 +-
 .../gfac/bes/utils/UASDataStagingProcessor.java |  73 ++---
 .../monitor/impl/pull/qstat/HPCPullMonitor.java |  61 ++--
 .../airavata/gfac/monitor/util/CommonUtils.java |   7 +-
 .../registry/jpa/impl/ExperimentRegistry.java   |   7 +-
 13 files changed, 525 insertions(+), 231 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --cc airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index 2d7768b,05395e6..dbb4a0c
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@@ -56,10 -55,11 +58,10 @@@ public class CreateLaunchExperiment 
      private static final String DEFAULT_GATEWAY = "default.registry.gateway";
      private static Airavata.Client airavataClient;
  
-     private static String echoAppId = "Echo_1869465f-f002-43a9-b243-c091f63ab059";
-     private static String wrfAppId = "WRF_a458df70-6808-4d5d-ae32-c49082f2a6cc";
-     private static String amberAppId = "Amber_1b99f73b-a88d-44e3-b04e-4f56ba95ed6f";
+     private static String echoAppId = "Echo_636b4530-6fb2-4c9e-998a-b41e648aa70f";
+     private static String wrfAppId = "WRF_d41bdc86-e280-4eb6-a045-708f69a8c116";
+     private static String amberAppId = "Amber_b23ee051-90d6-4892-827e-622a2f6c95ee";
  
 -
      private static String localHost = "localhost";
      private static String trestlesHostName = "trestles.sdsc.xsede.org";
      private static String unicoreHostName = "fsd-cloud15.zam.kfa-juelich.de";
@@@ -213,12 -213,17 +215,17 @@@
              input.setType(DataType.STRING);
              input.setValue("Echoed_Output=Hello World");
              exInputs.add(input);
 -            DataObjectType i2 = new DataObjectType();
 -            i2.setKey("Input_to_Echo1");
++            InputDataObjectType i2 = new InputDataObjectType();
++            i2.setName("Input_to_Echo1");
+             i2.setType(DataType.URI);
+             i2.setValue("http://shrib.com/22QmrrX4");
+             exInputs.add(i2);
  
 -            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
 -            DataObjectType output = new DataObjectType();
 -            output.setKey("Echoed_Output");
 +            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
 +            OutputDataObjectType output = new OutputDataObjectType();
 +            output.setName("Echoed_Output");
              output.setType(DataType.STRING);
-             output.setValue("");
+             output.setValue("22QmrrX4");
              exOut.add(output);
              
              

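The CreateLaunchExperiment hunk above moves the sample from DataObjectType/setKey to the typed InputDataObjectType/OutputDataObjectType models with setName. A minimal sketch of building the echo inputs and outputs against the new model, mirroring the values in the hunk; the import package names are assumed from the Airavata 0.14 data models and may need adjusting to the Thrift-generated location in a given build:

    import java.util.ArrayList;
    import java.util.List;

    // Assumed package for the Thrift-generated data objects; verify against your build.
    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

    public class EchoExperimentIOSketch {
        static List<InputDataObjectType> inputs() {
            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
            InputDataObjectType i2 = new InputDataObjectType();
            i2.setName("Input_to_Echo1");              // setName replaces the old setKey
            i2.setType(DataType.URI);
            i2.setValue("http://shrib.com/22QmrrX4");
            exInputs.add(i2);
            return exInputs;
        }

        static List<OutputDataObjectType> outputs() {
            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
            OutputDataObjectType output = new OutputDataObjectType();
            output.setName("Echoed_Output");
            output.setType(DataType.STRING);
            output.setValue("22QmrrX4");
            exOut.add(output);
            return exOut;
        }
    }
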
http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
index 398f05c,044ffa2..964e6d1
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
@@@ -105,165 -101,209 +105,371 @@@ public class BESProvider extends Abstra
  
  	public void execute(JobExecutionContext jobExecutionContext)
  			throws GFacProviderException, GFacException {
++<<<<<<< HEAD
 +        StorageClient sc = null;
 +        try {
 +            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
 +            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
 +            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
 +            String factoryUrl = null;
 +            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
 +                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
 +                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
 +            }
 +            EndpointReferenceType eprt = EndpointReferenceType.Factory
 +                    .newInstance();
 +            eprt.addNewAddress().setStringValue(factoryUrl);
 +            String userDN = getUserName(jobExecutionContext);
 +
 +            // TODO: to be removed
 +            if (userDN == null || userDN.equalsIgnoreCase("admin")) {
 +                userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
 +            }
 +            CreateActivityDocument cad = CreateActivityDocument.Factory
 +                    .newInstance();
 +            JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
 +                    .newInstance();
 +
 +            // create storage
 +            StorageCreator storageCreator = new StorageCreator(secProperties,
 +                    factoryUrl, 5, null);
 +            sc = storageCreator.createStorage();
 +
 +            JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(
 +                    jobExecutionContext, sc.getUrl()).getJobDefinition();
 +            cad.addNewCreateActivity().addNewActivityDocument()
 +                    .setJobDefinition(jobDefinition);
 +            log.info("JSDL" + jobDefDoc.toString());
 +
 +            // upload files if any
 +            DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
 +            dt.uploadLocalFiles();
 +
 +            JobDetails jobDetails = new JobDetails();
 +            FactoryClient factory = new FactoryClient(eprt, secProperties);
 +
 +            log.info(String.format("Activity Submitting to %s ... \n",
 +                    factoryUrl));
 +            jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
 +            CreateActivityResponseDocument response = factory.createActivity(cad);
 +            log.info(String.format("Activity Submitted to %s \n", factoryUrl));
 +
 +            EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
 +
 +            log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
 +
 +            // factory.waitWhileActivityIsDone(activityEpr, 1000);
 +            jobId = WSUtilities.extractResourceID(activityEpr);
 +            if (jobId == null) {
 +                jobId = new Long(Calendar.getInstance().getTimeInMillis())
 +                        .toString();
 +            }
 +            log.info("JobID: " + jobId);
 +            jobDetails.setJobID(activityEpr.toString());
 +            jobDetails.setJobDescription(activityEpr.toString());
 +
 +            jobExecutionContext.setJobDetails(jobDetails);
 +            log.info(formatStatusMessage(activityEpr.getAddress()
 +                    .getStringValue(), factory.getActivityStatus(activityEpr)
 +                    .toString()));
 +
 +            jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
 +            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
 +
 +            factory.getActivityStatus(activityEpr);
 +            log.info(formatStatusMessage(activityEpr.getAddress()
 +                    .getStringValue(), factory.getActivityStatus(activityEpr)
 +                    .toString()));
 +
 +            // TODO publish the status messages to the message bus
 +            while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
 +                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
 +                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
 +
 +                ActivityStatusType activityStatus = getStatus(factory, activityEpr);
 +                JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
 +                String jobStatusMessage = "Status of job " + jobId + "is "
 +                        + applicationJobStatus;
 +                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
 +                        applicationJobStatus);
 +
 +                jobExecutionContext.getNotifier().publish(
 +                        new StatusChangeEvent(jobStatusMessage));
 +
 +                // GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
 +                // applicationJobStatus);
 +                try {
 +                    Thread.sleep(5000);
 +                } catch (InterruptedException e) {
 +                }
 +                continue;
 +            }
 +
 +            ActivityStatusType activityStatus = null;
 +            activityStatus = getStatus(factory, activityEpr);
 +            log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
 +            ActivityClient activityClient;
 +            activityClient = new ActivityClient(activityEpr, secProperties);
 +            dt.setStorageClient(activityClient.getUspaceClient());
 +
 +            if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
 +                String error = activityStatus.getFault().getFaultcode()
 +                        .getLocalPart()
 +                        + "\n"
 +                        + activityStatus.getFault().getFaultstring()
 +                        + "\n EXITCODE: " + activityStatus.getExitCode();
 +                log.info(error);
 +                try {
 +                    Thread.sleep(5000);
 +                } catch (InterruptedException e) {
 +                }
 +                dt.downloadStdOuts();
 +            } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
 +                JobState applicationJobStatus = JobState.CANCELED;
 +                String jobStatusMessage = "Status of job " + jobId + " is "
 +                        + applicationJobStatus;
 +                jobExecutionContext.getNotifier().publish(
 +                        new StatusChangeEvent(jobStatusMessage));
 +                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
 +                        applicationJobStatus);
 +                throw new GFacProviderException(
 +                        jobExecutionContext.getExperimentID() + " Job Canceled");
 +            } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
 +                try {
 +                    Thread.sleep(5000);
 +                } catch (InterruptedException e) {
 +                }
 +                if (activityStatus.getExitCode() == 0) {
 +                    dt.downloadRemoteFiles();
 +                } else {
 +                    dt.downloadStdOuts();
 +                }
 +            }
 +        } catch (AppCatalogException e) {
 +            log.error("Error while retrieving UNICORE job submission..");
 +            throw new GFacProviderException("Error while retrieving UNICORE job submission..", e);
 +        } catch (Exception e) {
 +            log.error("Cannot create storage..");
 +            throw new GFacProviderException("Cannot create storage..", e);
 +        } finally {
 +            // destroy sms instance
 +            try {
 +                if (sc != null) {
 +                    sc.destroy();
 +                }
 +            } catch (Exception e) {
 +                log.warn(
 +                        "Cannot destroy temporary SMS instance:" + sc.getUrl(),
 +                        e);
 +            }
 +        }
 +
 +    }
++=======
+ 		UnicoreHostType host = (UnicoreHostType) jobExecutionContext
+ 				.getApplicationContext().getHostDescription().getType();
+ 
+ 		String factoryUrl = host.getUnicoreBESEndPointArray()[0];
+ 
+ 		EndpointReferenceType eprt = EndpointReferenceType.Factory
+ 				.newInstance();
+ 		eprt.addNewAddress().setStringValue(factoryUrl);
+ 
+ 		// WSUtilities.addServerIdentity(eprt, serverDN);
+ 
+ 		String userDN = getUserName(jobExecutionContext);
+ 
+ 		// TODO: to be removed
+ 		if (userDN == null || userDN.equalsIgnoreCase("admin")) {
+ 			userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+ 		}
+ 
+ 		StorageClient sc = null;
+ 
+ 		try {
+ 
+ 			CreateActivityDocument cad = CreateActivityDocument.Factory
+ 					.newInstance();
+ 			JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
+ 					.newInstance();
+ 
+ //			String xlogin = getCNFromUserDN(userDN);
+ 
+ 			// create storage
+ 			StorageCreator storageCreator = new StorageCreator(secProperties,
+ 					factoryUrl, 5, null);
+ 
+ 			try {
+ 				sc = storageCreator.createStorage();
+ 			} catch (Exception e2) {
+ 				log.error("Cannot create storage..");
+ 				throw new GFacProviderException("Cannot create storage..", e2);
+ 			}
+ 
+ 			JobDefinitionType jobDefinition = jobDefDoc.addNewJobDefinition();
+ 			try {
+ 				jobDefinition = JSDLGenerator.buildJSDLInstance(
+ 						jobExecutionContext, sc.getUrl()).getJobDefinition();
+ 				cad.addNewCreateActivity().addNewActivityDocument()
+ 						.setJobDefinition(jobDefinition);
+ 				log.info("JSDL" + jobDefDoc.toString());
+ 			} catch (Exception e1) {
+ 				throw new GFacProviderException(
+ 						"Cannot generate JSDL instance from the JobExecutionContext.",
+ 						e1);
+ 			}
+ 
+ 			// upload files if any
+ 			DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
+ 			dt.uploadLocalFiles();
+ 
+ 			FactoryClient factory = null;
+ 			JobDetails jobDetails = new JobDetails();
+ 
+ 			try {
+ 				factory = new FactoryClient(eprt, secProperties);
+ 			} catch (Exception e) {
+ 				throw new GFacProviderException(e.getLocalizedMessage(), e);
+ 			}
+ 			CreateActivityResponseDocument response = null;
+ 			try {
+ 				log.info(String.format("Activity Submitting to %s ... \n",
+ 						factoryUrl));
+ 				jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
+ 				response = factory.createActivity(cad);
+ 				log.info(String.format("Activity Submitted to %s \n", factoryUrl));
+ 			} catch (Exception e) {
+ 				throw new GFacProviderException("Cannot create activity.", e);
+ 			}
+ 			EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
+ 
+ 			log.info("Activity : " + activityEpr.getAddress().getStringValue()	+ " Submitted.");
+ 
+ 			// factory.waitWhileActivityIsDone(activityEpr, 1000);
+ 			jobId = WSUtilities.extractResourceID(activityEpr);
+ 			if (jobId == null) {
+ 				jobId = new Long(Calendar.getInstance().getTimeInMillis())
+ 						.toString();
+ 			}
+ 			log.info("JobID: " + jobId);
+ 			jobDetails.setJobID(jobId);
+ 			jobDetails.setJobDescription(jobId);
+ 
+ 			jobExecutionContext.setJobDetails(jobDetails);
+ 			try {
+ 			log.info(formatStatusMessage(activityEpr.getAddress()
+ 					.getStringValue(), factory.getActivityStatus(activityEpr)
+ 					.toString()));
+ 			
+ 			jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
+ //			GFacUtils.saveJobStatus(jobExecutionContext, details,JobState.SUBMITTED);
+ 
+ 			factory.getActivityStatus(activityEpr);
+ 			log.info(formatStatusMessage(activityEpr.getAddress()
+ 					.getStringValue(), factory.getActivityStatus(activityEpr)
+ 					.toString()));
+ 
+ 			// TODO publish the status messages to the message bus
+ 			while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
+ 					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
+ 					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
+ 
+ 				ActivityStatusType activityStatus = null;
+ 				try {
+ 					activityStatus = getStatus(factory, activityEpr);
+ 					JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
+ 					String jobStatusMessage = "Status of job " + jobId + " is "
+ 							+ applicationJobStatus;
+ 					//TODO: properly use GFacUtils..
+ //					GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,	applicationJobStatus);
+ 
+ 					jobExecutionContext.getNotifier().publish(
+ 							new StatusChangeEvent(jobStatusMessage));
+ 
+ 					// GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
+ 					// applicationJobStatus);
+ 				} catch (UnknownActivityIdentifierFault e) {
+ 					throw new GFacProviderException(e.getMessage(),
+ 							e.getCause());
+ 				}
+ 
+ 				try {
+ 					Thread.sleep(5000);
+ 				} catch (InterruptedException e) {
+ 				}
+ 				continue;
+ 			}
+ 			}catch(Exception e) {
+ 				throw new GFacProviderException(e.getMessage(),
+ 						e.getCause());
+ 				
+ 			}
+ 			
+ 			ActivityStatusType activityStatus = null;
+ 			try {
+ 				activityStatus = getStatus(factory, activityEpr);
+ 				log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
+ 				ActivityClient activityClient;
+ 				activityClient = new ActivityClient(activityEpr,secProperties);
+ 				dt.setStorageClient(activityClient.getUspaceClient());
+ 			} catch (Exception e1) {
+ 				throw new GFacProviderException(e1.getMessage(),
+ 						e1.getCause());
+ 			}
+ 
+ 			
+ 
+ 			if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
+ 				String error = activityStatus.getFault().getFaultcode()
+ 						.getLocalPart()
+ 						+ "\n"
+ 						+ activityStatus.getFault().getFaultstring()
+ 						+ "\n EXITCODE: " + activityStatus.getExitCode();
+ 				log.info(error);
+ 				try {
+ 					Thread.sleep(5000);
+ 				} catch (InterruptedException e) {
+ 				}
+ 				dt.downloadStdOuts();
+ 			} else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
+ 				JobState applicationJobStatus = JobState.CANCELED;
+ 				String jobStatusMessage = "Status of job " + jobId + " is "
+ 						+ applicationJobStatus;
+ 				jobExecutionContext.getNotifier().publish(
+ 						new StatusChangeEvent(jobStatusMessage));
+ 				//TODO: properly use GFacUtils..
+ //				GFacUtils.updateJobStatus(jobExecutionContext, jobDetails, applicationJobStatus);
+ 				throw new GFacProviderException(
+ 						jobExecutionContext.getExperimentID() + " Job Canceled");
+ 			}
+ 
+ 			else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
+ 				try {
+ 					Thread.sleep(5000);
+ 				} catch (InterruptedException e) {
+ 				}
+ 				if (activityStatus.getExitCode() == 0) {
+ 					dt.downloadRemoteFiles();
+ 				} else {
+ 					dt.downloadStdOuts();
+ 				}
+ 			}
+ 
+ 		} finally {
+ 			// destroy sms instance
+ 			try {
+ 				if (sc != null) {
+ 					sc.destroy();
+ 				}
+ 			} catch (Exception e) {
+ 				log.warn(
+ 						"Cannot destroy temporary SMS instance:" + sc.getUrl(),
+ 						e);
+ 			}
+ 		}
+ 
+ 	}
++>>>>>>> f7de359dcae3694912248e50a1a2fd5e30fc613e
  
  	private JobState getApplicationJobStatus(ActivityStatusType activityStatus) {
  		if (activityStatus == null) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --cc modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 25113fd,66cc5f7..171ca07
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@@ -156,9 -158,11 +156,10 @@@ public class HPCPullMonitor extends Pul
          try {
              take = this.queue.take();
              List<HostMonitorData> hostMonitorData = take.getHostMonitorData();
-             for (HostMonitorData iHostMonitorData : hostMonitorData) {
+             for (ListIterator<HostMonitorData> hostIterator = hostMonitorData.listIterator(); hostIterator.hasNext();) {
+                 HostMonitorData iHostMonitorData = hostIterator.next();
 -                if (iHostMonitorData.getHost().getType() instanceof GsisshHostType
 -                        || iHostMonitorData.getHost().getType() instanceof SSHHostType) {
 -                    String hostName = iHostMonitorData.getHost().getType().getHostAddress();
 +                if (iHostMonitorData.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH) {
 +                    String hostName = iHostMonitorData.getComputeResourceDescription().getHostName();
                      ResourceConnection connection = null;
                      if (connections.containsKey(hostName)) {
                          if (!connections.get(hostName).isConnected()) {
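
The hunk above also replaces the enhanced for loop with an explicit ListIterator and keys the check on JobSubmissionProtocol.SSH rather than the old GSISSH/SSH host types. A ListIterator is normally introduced so the loop body can remove entries while iterating, which a for-each loop cannot do safely. The sketch below is a generic illustration of that pattern under that assumption; the helper method name and the remove() call are not taken from the Airavata sources, only the types and getters shown in the hunk are.

    // Hypothetical helper: drop hosts that are not submitted over SSH while walking the list.
    // Assumes java.util.List and java.util.ListIterator are imported and the monitor types are in scope.
    static void pruneNonSshHosts(List<HostMonitorData> hostMonitorData) {
        for (ListIterator<HostMonitorData> hostIterator = hostMonitorData.listIterator(); hostIterator.hasNext();) {
            HostMonitorData iHostMonitorData = hostIterator.next();
            if (iHostMonitorData.getJobSubmissionProtocol() != JobSubmissionProtocol.SSH) {
                // Legal on the iterator; the same removal inside a for-each loop
                // would throw a ConcurrentModificationException.
                hostIterator.remove();
            }
        }
    }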

http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/util/CommonUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/65ad5860/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
----------------------------------------------------------------------
diff --cc modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
index a83f5f1,8e9ae58..edbf39e
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
@@@ -93,11 -91,12 +93,12 @@@ public class ExperimentRegistry 
                  addUserConfigData(userConfigurationData, experimentID);
              }
  
 -            List<DataObjectType> experimentOutputs = experiment.getExperimentOutputs();
 +            List<OutputDataObjectType> experimentOutputs = experiment.getExperimentOutputs();
              if (experimentOutputs != null && !experimentOutputs.isEmpty()){
-                 for (OutputDataObjectType output : experimentOutputs){
-                     output.setValue("");
-                 }
+             	//TODO: short change.
+ //                for (DataObjectType output : experimentOutputs){
+ //                    output.setValue("");
+ //                }
                  addExpOutputs(experimentOutputs, experimentID);
              }
  


[42/50] [abbrv] airavata git commit: Replaced DataObjectType with Input and Output DataObjectType

Posted by ch...@apache.org.
Replaced DataObjectType with Input and Output DataObjectType


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/4b27ce8a
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/4b27ce8a
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/4b27ce8a

Branch: refs/heads/master
Commit: 4b27ce8a0b8a5979fb4ce82f9573b00b3f35ea82
Parents: 82127c7
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 19:35:11 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 19:35:11 2014 -0500

----------------------------------------------------------------------
 .../server/handler/AiravataServerHandler.java   | 72 +++++++++++++----
 .../apache/airavata/integration/BaseCaseIT.java | 49 ++++++------
 .../airavata/integration/DataRetrievalIT.java   | 15 ++--
 .../airavata/integration/SimpleEchoIT.java      | 23 +++---
 .../engine/interpretor/WorkflowInterpreter.java | 81 ++++++++++----------
 5 files changed, 140 insertions(+), 100 deletions(-)
----------------------------------------------------------------------
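
For readers skimming the hunks below: the experiment-model DataObjectType, addressed with setKey(), is replaced throughout by the app-catalog types InputDataObjectType and OutputDataObjectType, addressed with setName(). The following is a condensed sketch of the new call pattern, assembled from the BaseCaseIT and SimpleEchoIT changes below; the wrapper class and method names are illustrative only and not part of the commit.

import org.apache.airavata.model.appcatalog.appinterface.DataType;
import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

import java.util.ArrayList;
import java.util.List;

public class EchoExperimentIO {

    // Experiment inputs are now InputDataObjectType instances; setName() replaces the old setKey().
    public static List<InputDataObjectType> buildInputs() {
        List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
        InputDataObjectType input = new InputDataObjectType();
        input.setName("echo_input");
        input.setType(DataType.STRING);
        input.setValue("echo_output=Hello World");
        exInputs.add(input);
        return exInputs;
    }

    // Experiment outputs are OutputDataObjectType instances, built the same way.
    public static List<OutputDataObjectType> buildOutputs() {
        List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
        OutputDataObjectType output = new OutputDataObjectType();
        output.setName("echo_output");
        output.setType(DataType.STRING);
        output.setValue("");
        exOut.add(output);
        return exOut;
    }
}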


http://git-wip-us.apache.org/repos/asf/airavata/blob/4b27ce8a/airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java b/airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java
index 810ffcc..180ed78 100644
--- a/airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java
+++ b/airavata-api/airavata-api-server/src/main/java/org/apache/airavata/api/server/handler/AiravataServerHandler.java
@@ -21,14 +21,23 @@
 
 package org.apache.airavata.api.server.handler;
 
-import org.airavata.appcatalog.cpi.*;
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.airavata.appcatalog.cpi.ApplicationDeployment;
+import org.airavata.appcatalog.cpi.ComputeResource;
+import org.airavata.appcatalog.cpi.GwyResourceProfile;
+import org.airavata.appcatalog.cpi.WorkflowCatalog;
 import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
-import org.apache.aiaravata.application.catalog.data.resources.*;
+import org.apache.aiaravata.application.catalog.data.resources.AbstractResource;
+import org.apache.aiaravata.application.catalog.data.resources.CloudSubmissionResource;
+import org.apache.aiaravata.application.catalog.data.resources.GridftpDataMovementResource;
+import org.apache.aiaravata.application.catalog.data.resources.LocalDataMovementResource;
+import org.apache.aiaravata.application.catalog.data.resources.LocalSubmissionResource;
+import org.apache.aiaravata.application.catalog.data.resources.ScpDataMovementResource;
+import org.apache.aiaravata.application.catalog.data.resources.SshJobSubmissionResource;
 import org.apache.aiaravata.application.catalog.data.util.AppCatalogThriftConversion;
 import org.apache.airavata.api.Airavata;
 import org.apache.airavata.api.airavataAPIConstants;
-import org.apache.airavata.api.server.util.AiravataServerThreadPoolExecutor;
-import org.apache.airavata.api.server.util.DataModelUtils;
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.logger.AiravataLogger;
@@ -44,31 +53,60 @@ import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.*;
+import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.GridFTPDataMovement;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALDataMovement;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.SCPDataMovement;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
-import org.apache.airavata.model.error.*;
+import org.apache.airavata.model.error.AiravataClientConnectException;
+import org.apache.airavata.model.error.AiravataClientException;
+import org.apache.airavata.model.error.AiravataErrorType;
+import org.apache.airavata.model.error.AiravataSystemException;
+import org.apache.airavata.model.error.ExperimentNotFoundException;
+import org.apache.airavata.model.error.InvalidRequestException;
+import org.apache.airavata.model.error.ProjectNotFoundException;
 import org.apache.airavata.model.messaging.event.ExperimentStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.MessageType;
-import org.apache.airavata.model.util.ExecutionType;
 import org.apache.airavata.model.workspace.Project;
-import org.apache.airavata.model.workspace.experiment.*;
+import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.Experiment;
+import org.apache.airavata.model.workspace.experiment.ExperimentState;
+import org.apache.airavata.model.workspace.experiment.ExperimentStatus;
+import org.apache.airavata.model.workspace.experiment.ExperimentSummary;
+import org.apache.airavata.model.workspace.experiment.JobDetails;
+import org.apache.airavata.model.workspace.experiment.JobStatus;
+import org.apache.airavata.model.workspace.experiment.TaskDetails;
+import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
+import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;
 import org.apache.airavata.orchestrator.client.OrchestratorClientFactory;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService;
 import org.apache.airavata.orchestrator.cpi.OrchestratorService.Client;
 import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
-import org.apache.airavata.registry.cpi.*;
+import org.apache.airavata.registry.cpi.ChildDataType;
+import org.apache.airavata.registry.cpi.ParentDataType;
+import org.apache.airavata.registry.cpi.Registry;
+import org.apache.airavata.registry.cpi.RegistryException;
+import org.apache.airavata.registry.cpi.RegistryModelType;
 import org.apache.airavata.registry.cpi.utils.Constants;
-import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.TaskDetailConstants;
-import org.apache.airavata.registry.cpi.utils.Constants.FieldConstants.WorkflowNodeConstants;
 import org.apache.airavata.workflow.catalog.WorkflowCatalogFactory;
-import org.apache.airavata.workflow.engine.WorkflowEngine;
-import org.apache.airavata.workflow.engine.WorkflowEngineException;
-import org.apache.airavata.workflow.engine.WorkflowEngineFactory;
 import org.apache.thrift.TException;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 public class AiravataServerHandler implements Airavata.Iface {
     private static final AiravataLogger logger = AiravataLoggerFactory.getLogger(AiravataServerHandler.class);
@@ -897,14 +935,14 @@ public class AiravataServerHandler implements Airavata.Iface {
     }
 
     @Override
-    public List<DataObjectType> getExperimentOutputs(String airavataExperimentId) throws TException {
+    public List<OutputDataObjectType> getExperimentOutputs(String airavataExperimentId) throws TException {
         try {
             registry = RegistryFactory.getDefaultRegistry();
             if (!registry.isExist(RegistryModelType.EXPERIMENT, airavataExperimentId)){
                 logger.errorId(airavataExperimentId, "Get experiment outputs failed, experiment {} doesn't exit.", airavataExperimentId);
                 throw new ExperimentNotFoundException("Requested experiment id " + airavataExperimentId + " does not exist in the system..");
             }
-            return (List<DataObjectType>)registry.get(RegistryModelType.EXPERIMENT_OUTPUT, airavataExperimentId);
+            return (List<OutputDataObjectType>)registry.get(RegistryModelType.EXPERIMENT_OUTPUT, airavataExperimentId);
         } catch (Exception e) {
             logger.errorId(airavataExperimentId, "Error while retrieving the experiment outputs", e);
             AiravataSystemException exception = new AiravataSystemException();

http://git-wip-us.apache.org/repos/asf/airavata/blob/4b27ce8a/modules/integration-tests/src/test/java/org/apache/airavata/integration/BaseCaseIT.java
----------------------------------------------------------------------
diff --git a/modules/integration-tests/src/test/java/org/apache/airavata/integration/BaseCaseIT.java b/modules/integration-tests/src/test/java/org/apache/airavata/integration/BaseCaseIT.java
index 0b44013..17107cf 100644
--- a/modules/integration-tests/src/test/java/org/apache/airavata/integration/BaseCaseIT.java
+++ b/modules/integration-tests/src/test/java/org/apache/airavata/integration/BaseCaseIT.java
@@ -21,34 +21,35 @@
 
 package org.apache.airavata.integration;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 import junit.framework.Assert;
-
-//import org.apache.airavata.client.api.exception.AiravataAPIInvocationException;
-//import org.apache.airavata.client.tools.DocumentCreatorNew;
 import org.apache.airavata.integration.tools.DocumentCreatorNew;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.util.ProjectModelUtil;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.model.workspace.experiment.JobStatus;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+//import org.apache.airavata.client.api.exception.AiravataAPIInvocationException;
+//import org.apache.airavata.client.tools.DocumentCreatorNew;
 //import org.apache.airavata.workflow.model.wf.Workflow;
 //import org.apache.airavata.ws.monitor.EventData;
 //import org.apache.airavata.ws.monitor.EventDataListenerAdapter;
 //import org.apache.airavata.ws.monitor.EventDataRepository;
 //import org.apache.airavata.ws.monitor.Monitor;
 //import org.apache.airavata.ws.monitor.MonitorUtil;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
 
 /**
  * Integration test class.
@@ -87,16 +88,16 @@ public class BaseCaseIT extends WorkflowIntegrationTestBase {
         log.info("Running job in trestles...");
         DocumentCreatorNew documentCreator = new DocumentCreatorNew(client);
         documentCreator.createPBSDocsForOGCE_Echo();
-        List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-        DataObjectType input = new DataObjectType();
-        input.setKey("echo_input");
+        List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+        InputDataObjectType input = new InputDataObjectType();
+        input.setName("echo_input");
         input.setType(DataType.STRING);
         input.setValue("echo_output=Hello World");
         exInputs.add(input);
 
-        List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-        DataObjectType output = new DataObjectType();
-        output.setKey("echo_output");
+        List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+        OutputDataObjectType output = new OutputDataObjectType();
+        output.setName("echo_output");
         output.setType(DataType.STRING);
         output.setValue("");
         exOut.add(output);
@@ -161,16 +162,16 @@ public class BaseCaseIT extends WorkflowIntegrationTestBase {
         log.info("Running job in Stampede...");
         DocumentCreatorNew documentCreator = new DocumentCreatorNew(client);
         documentCreator.createSlurmDocs();
-        List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-        DataObjectType input = new DataObjectType();
-        input.setKey("echo_input");
+        List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+        InputDataObjectType input = new InputDataObjectType();
+        input.setName("echo_input");
         input.setType(DataType.STRING);
         input.setValue("echo_output=Hello World");
         exInputs.add(input);
 
-        List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-        DataObjectType output = new DataObjectType();
-        output.setKey("echo_output");
+        List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+        OutputDataObjectType output = new OutputDataObjectType();
+        output.setName("echo_output");
         output.setType(DataType.STRING);
         output.setValue("");
         exOut.add(output);

http://git-wip-us.apache.org/repos/asf/airavata/blob/4b27ce8a/modules/integration-tests/src/test/java/org/apache/airavata/integration/DataRetrievalIT.java
----------------------------------------------------------------------
diff --git a/modules/integration-tests/src/test/java/org/apache/airavata/integration/DataRetrievalIT.java b/modules/integration-tests/src/test/java/org/apache/airavata/integration/DataRetrievalIT.java
index af9a729..c768b68 100644
--- a/modules/integration-tests/src/test/java/org/apache/airavata/integration/DataRetrievalIT.java
+++ b/modules/integration-tests/src/test/java/org/apache/airavata/integration/DataRetrievalIT.java
@@ -25,6 +25,9 @@ import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.api.Airavata.Client;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.integration.tools.DocumentCreatorNew;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.error.*;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.util.ProjectModelUtil;
@@ -176,16 +179,16 @@ public class DataRetrievalIT extends AbstractIntegrationTest {
 	public String runExperiment(String user, String project) throws ApplicationSettingsException, AiravataClientConnectException,
 			InvalidRequestException, AiravataClientException,
 			AiravataSystemException, TException, ExperimentNotFoundException {
-		List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-		DataObjectType input = new DataObjectType();
-		input.setKey("echo_input");
+		List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+        InputDataObjectType input = new InputDataObjectType();
+        input.setName("echo_input");
 		input.setType(DataType.STRING);
 		input.setValue("echo_output=Hello World");
 		exInputs.add(input);
 
-		List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-		DataObjectType output = new DataObjectType();
-		output.setKey("echo_output");
+		List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+        OutputDataObjectType output = new OutputDataObjectType();
+		output.setName("echo_output");
 		output.setType(DataType.STRING);
 		output.setValue("");
 		exOut.add(output);

http://git-wip-us.apache.org/repos/asf/airavata/blob/4b27ce8a/modules/integration-tests/src/test/java/org/apache/airavata/integration/SimpleEchoIT.java
----------------------------------------------------------------------
diff --git a/modules/integration-tests/src/test/java/org/apache/airavata/integration/SimpleEchoIT.java b/modules/integration-tests/src/test/java/org/apache/airavata/integration/SimpleEchoIT.java
index 499dcda..81d1383 100644
--- a/modules/integration-tests/src/test/java/org/apache/airavata/integration/SimpleEchoIT.java
+++ b/modules/integration-tests/src/test/java/org/apache/airavata/integration/SimpleEchoIT.java
@@ -21,16 +21,14 @@
 
 package org.apache.airavata.integration;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.airavata.integration.tools.DocumentCreatorNew;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.util.ExperimentModelUtil;
 import org.apache.airavata.model.util.ProjectModelUtil;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.UserConfigurationData;
 import org.slf4j.Logger;
@@ -38,6 +36,9 @@ import org.slf4j.LoggerFactory;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class SimpleEchoIT extends SingleAppIntegrationTestBase {
     private final static Logger log = LoggerFactory.getLogger(SimpleEchoIT.class);
 
@@ -61,16 +62,16 @@ public class SimpleEchoIT extends SingleAppIntegrationTestBase {
         String appId = hostAndappId.split(",")[1];
         String hostId = hostAndappId.split(",")[0];
         
-        List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-        DataObjectType input = new DataObjectType();
-        input.setKey("echo_input");
+        List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+        InputDataObjectType input = new InputDataObjectType();
+        input.setName("echo_input");
         input.setType(DataType.STRING);
         input.setValue("echo_output=Hello World");
         exInputs.add(input);
 
-        List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-        DataObjectType output = new DataObjectType();
-        output.setKey("echo_output");
+        List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+        OutputDataObjectType output = new OutputDataObjectType();
+        output.setName("echo_output");
         output.setType(DataType.STRING);
         output.setValue("");
         exOut.add(output);

http://git-wip-us.apache.org/repos/asf/airavata/blob/4b27ce8a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
----------------------------------------------------------------------
diff --git a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
index 59681c7..fa4fd32 100644
--- a/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
+++ b/modules/workflow-model/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/interpretor/WorkflowInterpreter.java
@@ -21,28 +21,7 @@
 
 package org.apache.airavata.workflow.engine.interpretor;
 
-import java.net.URL;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import javax.xml.namespace.QName;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;
-
-import org.apache.airavata.api.Airavata;
+import com.google.common.eventbus.Subscribe;
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.AiravataUtils;
@@ -52,6 +31,8 @@ import org.apache.airavata.common.utils.XMLUtil;
 import org.apache.airavata.common.utils.listener.AbstractActivityListener;
 import org.apache.airavata.messaging.core.MessageContext;
 import org.apache.airavata.messaging.core.Publisher;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.messaging.event.ExperimentStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.MessageType;
 import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
@@ -59,7 +40,6 @@ import org.apache.airavata.model.messaging.event.TaskStatusChangeEvent;
 import org.apache.airavata.model.messaging.event.WorkflowIdentifier;
 import org.apache.airavata.model.messaging.event.WorkflowNodeStatusChangeEvent;
 import org.apache.airavata.model.util.ExperimentModelUtil;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.ExperimentState;
@@ -125,10 +105,27 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xmlpull.infoset.XmlElement;
-
 import xsul5.XmlConstants;
 
-import com.google.common.eventbus.Subscribe;
+import javax.xml.namespace.QName;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import java.net.URL;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicInteger;
 
 public class WorkflowInterpreter implements AbstractActivityListener{
     private static final Logger log = LoggerFactory.getLogger(WorkflowInterpreter.class);
@@ -238,10 +235,10 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 		try {
 			this.getWorkflow().setExecutionState(WorkflowExecutionState.RUNNING);
 			ArrayList<Node> inputNodes = this.getInputNodesDynamically();
-			List<DataObjectType> experimentInputs = experiment.getExperimentInputs();
+			List<InputDataObjectType> experimentInputs = experiment.getExperimentInputs();
 			Map<String,String> inputDataStrings=new HashMap<String, String>();
-			for (DataObjectType dataObjectType : experimentInputs) {
-				inputDataStrings.put(dataObjectType.getKey(), dataObjectType.getValue());
+			for (InputDataObjectType dataObjectType : experimentInputs) {
+				inputDataStrings.put(dataObjectType.getName(), dataObjectType.getValue());
 			}
 			for (Node node : inputNodes) {
                 publishNodeStatusChange(WorkflowNodeState.EXECUTING,node.getID(),experiment.getExperimentID());
@@ -261,8 +258,8 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 				Object portValue = ((InputNode) node).getDefaultValue();
                 //Saving workflow input Node data before running the workflow
 				WorkflowNodeDetails workflowNode = createWorkflowNodeDetails(node);
-				DataObjectType elem = new DataObjectType();
-				elem.setKey(portId);
+                InputDataObjectType elem = new InputDataObjectType();
+				elem.setName(portId);
 				elem.setValue(portValue==null?null:portValue.toString());
 				workflowNode.addToNodeInputs(elem);
 				getRegistry().update(RegistryModelType.WORKFLOW_NODE_DETAIL, workflowNode, workflowNode.getNodeInstanceId());
@@ -508,8 +505,8 @@ public class WorkflowInterpreter implements AbstractActivityListener{
                     String portValue = ((OutputNode) node).getDescription();
 //                    this.getConfig().getConfiguration().getAiravataAPI().getProvenanceManager().setWorkflowInstanceNodeOutput(workflowInstanceNode, portname + "=" + portValue);
 //                    this.getConfig().getConfiguration().getAiravataAPI().getProvenanceManager().setWorkflowNodeType(workflowInstanceNode, workflowNodeType);
-                    DataObjectType elem = new DataObjectType();
-                    elem.setKey(portname);
+                    OutputDataObjectType elem = new OutputDataObjectType();
+                    elem.setName(portname);
                     elem.setValue(portValue);
 					workflowNodeDetails.addToNodeOutputs(elem);
 					getRegistry().update(RegistryModelType.WORKFLOW_NODE_DETAIL, workflowNodeDetails, workflowNodeDetails.getNodeInstanceId());
@@ -572,8 +569,8 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 					throw new WorkFlowInterpreterException("Unable to find output for the node:" + node.getID());
 				}
 				WorkflowNodeDetails workflowNodeDetails = nodeInstanceList.get(node);
-				DataObjectType elem = new DataObjectType();
-				elem.setKey(node.getName());
+                OutputDataObjectType elem = new OutputDataObjectType();
+				elem.setName(node.getName());
 				elem.setValue(val.toString());
 				workflowNodeDetails.addToNodeOutputs(elem);
 				try {
@@ -1061,8 +1058,8 @@ public class WorkflowInterpreter implements AbstractActivityListener{
                     portInputValue = outputData.get(dataPort.getEdge(0).getFromPort().getName());
                 }
 			}
-			DataObjectType elem = new DataObjectType();
-			elem.setKey(dataPort.getName());
+            InputDataObjectType elem = new InputDataObjectType();
+            elem.setName(dataPort.getName());
 			elem.setValue(portInputValue);
 			nodeDetails.addToNodeInputs(elem);
 		}
@@ -1079,8 +1076,8 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 		Map<String, String> outputData = nodeOutputData.get(node);
 		for (DataPort dataPort : outputPorts) {
 			String portInputValue = outputData.get(dataPort.getName());
-			DataObjectType elem = new DataObjectType();
-			elem.setKey(dataPort.getName());
+            OutputDataObjectType elem = new OutputDataObjectType();
+			elem.setName(dataPort.getName());
 			elem.setValue(portInputValue);
 			nodeDetails.addToNodeOutputs(elem);
 		}
@@ -1466,13 +1463,13 @@ public class WorkflowInterpreter implements AbstractActivityListener{
 		if (isTaskAwaiting(taskId)){
         	WorkflowNodeState state=WorkflowNodeState.COMPLETED;
 			Node node = getAwaitingNodeForTask(taskId);
-    		List<DataObjectType> applicationOutputs = taskOutputEvent.getOutput();
+    		List<OutputDataObjectType> applicationOutputs = taskOutputEvent.getOutput();
 			Map<String, String> outputData = new HashMap<String, String>();
-			for (DataObjectType outputObj : applicationOutputs) {
+			for (OutputDataObjectType outputObj : applicationOutputs) {
 				List<DataPort> outputPorts = node.getOutputPorts();
 				for (DataPort dataPort : outputPorts) {
-					if (dataPort.getName().equals(outputObj.getKey())){
-						outputData.put(outputObj.getKey(), outputObj.getValue());
+					if (dataPort.getName().equals(outputObj.getName())){
+						outputData.put(outputObj.getName(), outputObj.getValue());
 					}
 				}
 			}


[04/50] [abbrv] airavata git commit: Integrated the appCatalog thrift model with the GSISSH input and output handlers and improved the job execution context

Posted by ch...@apache.org.
Integrated the appCatalog thrift model with the GSISSH input and output handlers and improved the job execution context


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/ad2b1d3a
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/ad2b1d3a
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/ad2b1d3a

Branch: refs/heads/master
Commit: ad2b1d3a97ec38cb98c970fa815b4863c9e8e898
Parents: f29dfbe
Author: shamrath <sh...@gmail.com>
Authored: Fri Oct 31 17:41:22 2014 -0400
Committer: shamrath <sh...@gmail.com>
Committed: Fri Oct 31 17:41:22 2014 -0400

----------------------------------------------------------------------
 .../model/workspace/experiment/JobDetails.java  | 11 ++-
 .../gfac/core/context/JobExecutionContext.java  | 27 +++++-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 91 ++++++++++++++++----
 .../handler/GSISSHDirectorySetupHandler.java    |  7 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java | 18 ++--
 .../gsissh/handler/GSISSHOutputHandler.java     | 53 +++---------
 .../airavata/gsi/ssh/api/job/JobDescriptor.java |  7 ++
 7 files changed, 143 insertions(+), 71 deletions(-)
----------------------------------------------------------------------
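
Taken together, the hunks below mean the GFac handlers stop reading paths out of the application deployment description and instead ask the JobExecutionContext, which BetterGfacImpl now pre-populates from the compute resource's file systems and the gateway resource preferences. A short hedged sketch of the resulting handler-side call pattern; the getter and makeDirectory calls come from the diffs, while the helper method itself and its exception handling are illustrative scaffolding.

    // Hypothetical helper mirroring GSISSHDirectorySetupHandler after this commit:
    // all locations are resolved from the JobExecutionContext.
    static void setUpDirectories(Cluster cluster, JobExecutionContext jobExecutionContext) throws Exception {
        String workingDirectory = jobExecutionContext.getWorkingDir();
        cluster.makeDirectory(workingDirectory);
        cluster.makeDirectory(jobExecutionContext.getInputDir());
        cluster.makeDirectory(jobExecutionContext.getOutputDir());

        // Data movement is resolved from the context as well, via the new getters
        // added to JobExecutionContext in this commit.
        DataMovementInterface preferredDataMovement = jobExecutionContext.getPreferredDataMovementInterface();
        DataMovementProtocol protocol = preferredDataMovement.getDataMovementProtocol();
    }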


http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
index d1cbe5e..c1034a0 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
@@ -271,9 +271,14 @@ import org.slf4j.LoggerFactory;
     }
   }
 
-  public String getJobDescription() {
-    return this.jobDescription;
-  }
+    /**
+     * This method is deprecated now that the new thrift model with the app catalog has been introduced.
+     * @return
+     */
+    @Deprecated
+    public String getJobDescription() {
+        return this.jobDescription;
+    }
 
   public void setJobDescription(String jobDescription) {
     this.jobDescription = jobDescription;

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 891bece..d5e8daa 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -33,6 +33,7 @@ import org.apache.airavata.gfac.SecurityContext;
 import org.apache.airavata.gfac.core.cpi.GFac;
 import org.apache.airavata.gfac.core.notification.GFacNotifier;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
 import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
@@ -100,12 +101,20 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     private DataMovementProtocol preferredDataMovementProtocol;
     /**
      * List of job submission protocols sorted by priority order.
-      */
+     */
     private List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces;
     /**
      * use preferred job submission protocol.
      */
     private JobSubmissionInterface preferredJobSubmissionInterface;
+    /**
+     * List of data movement interfaces sorted by priority order.
+     */
+    private List<DataMovementInterface> hostPrioritizedDataMovementInterfaces;
+    /**
+     * use preferred data movement interface.
+     */
+    private DataMovementInterface preferredDataMovementInterface;
 
 //    private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -434,4 +443,20 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public String getHostName() {
         return applicationContext.getComputeResourceDescription().getHostName();
     }
+
+    public List<DataMovementInterface> getHostPrioritizedDataMovementInterfaces() {
+        return hostPrioritizedDataMovementInterfaces;
+    }
+
+    public void setHostPrioritizedDataMovementInterfaces(List<DataMovementInterface> hostPrioritizedDataMovementInterfaces) {
+        this.hostPrioritizedDataMovementInterfaces = hostPrioritizedDataMovementInterfaces;
+    }
+
+    public DataMovementInterface getPreferredDataMovementInterface() {
+        return preferredDataMovementInterface;
+    }
+
+    public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
+        this.preferredDataMovementInterface = preferredDataMovementInterface;
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index e8e4c66..656a291 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -52,8 +52,9 @@ import org.apache.airavata.messaging.core.PublisherFactory;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
+import org.apache.airavata.model.appcatalog.computeresource.FileSystems;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -74,6 +75,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
 /**
@@ -303,6 +305,7 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setZk(zk);
         jobExecutionContext.setCredentialStoreToken(AiravataZKUtils.getExpTokenId(zk, experimentID, taskID));
 
+        // handle job submission protocol
         List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
         if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
             Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
@@ -316,36 +319,92 @@ public class BetterGfacImpl implements GFac,Watcher {
         }else {
             throw new GFacException("Compute resource should have at least one job submission interface defined...");
         }
+        // handle data movement protocol
+        List<DataMovementInterface> dataMovementInterfaces = computeResource.getDataMovementInterfaces();
+        if (dataMovementInterfaces != null && !dataMovementInterfaces.isEmpty()) {
+            Collections.sort(dataMovementInterfaces, new Comparator<DataMovementInterface>() {
+                @Override
+                public int compare(DataMovementInterface dataMovementInterface, DataMovementInterface dataMovementInterface2) {
+                    return dataMovementInterface.getPriorityOrder() - dataMovementInterface2.getPriorityOrder();
+                }
+            });
+            jobExecutionContext.setHostPrioritizedDataMovementInterfaces(dataMovementInterfaces);
+        }
+
+        // set compute resource configuration as default preferred values, after that replace those with gateway user preferences.
+        populateDefaultComputeResourceConfiguration(jobExecutionContext, applicationInterface, computeResource);
+        // if gateway resource preference is set
         if (gatewayResourcePreferences != null ) {
             if (gatewayResourcePreferences.getScratchLocation() == null) {
                 gatewayResourcePreferences.setScratchLocation("/tmp");
             }
+            setUpWorkingLocation(jobExecutionContext, applicationInterface, gatewayResourcePreferences.getScratchLocation());
 
-            /**
-             * Working dir
-             */
-            String workingDir = gatewayResourcePreferences.getScratchLocation() + File.separator + jobExecutionContext.getExperimentID();
-            jobExecutionContext.setWorkingDir(workingDir);
+            jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
+            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
+                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+            } else {
+                for (JobSubmissionInterface jobSubmissionInterface : jobSubmissionInterfaces) {
+                    if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == jobSubmissionInterface.getJobSubmissionProtocol()) {
+                        jobExecutionContext.setPreferredJobSubmissionInterface(jobSubmissionInterface);
+                        break;
+                    }
+                }
+            }
+
+            // set gatewayUserPreferred data movement protocol and interface
+            jobExecutionContext.setPreferredDataMovementProtocol(gatewayResourcePreferences.getPreferredDataMovementProtocol());
+            if (gatewayResourcePreferences.getPreferredDataMovementProtocol() == null) {
+                jobExecutionContext.setPreferredDataMovementInterface(jobExecutionContext.getHostPrioritizedDataMovementInterfaces().get(0));
+                jobExecutionContext.setPreferredDataMovementProtocol(jobExecutionContext.getPreferredDataMovementInterface().getDataMovementProtocol());
+            } else {
+                for (DataMovementInterface dataMovementInterface : dataMovementInterfaces) {
+                    if (gatewayResourcePreferences.getPreferredDataMovementProtocol() == dataMovementInterface.getDataMovementProtocol()) {
+                        jobExecutionContext.setPreferredDataMovementInterface(dataMovementInterface);
+                        break;
+                    }
+                }
+            }
+        }
+        return jobExecutionContext;
+    }
+
+    private void setUpWorkingLocation(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, String scratchLocation) {
+
+        /**
+         * Working dir
+         */
+        String workingDir = scratchLocation + File.separator + jobExecutionContext.getExperimentID();
+        jobExecutionContext.setWorkingDir(workingDir);
 
             /*
             * Input and Output Directory
             */
-            jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
-            jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
+        jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
+        jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
 
             /*
             * Stdout and Stderr for Shell
             */
-            jobExecutionContext.setStandardOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
-            jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+        jobExecutionContext.setStandardOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
+        jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+    }
 
-            jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
-            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
-                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
-                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
-            }
+    private void populateDefaultComputeResourceConfiguration(JobExecutionContext jobExecutionContext, ApplicationInterfaceDescription applicationInterface, ComputeResourceDescription computeResource) {
+        Map<FileSystems, String> fileSystems = computeResource.getFileSystems();
+        String scratchLocation = fileSystems.get(FileSystems.SCRATCH);
+        if (scratchLocation != null) {
+            setUpWorkingLocation(jobExecutionContext, applicationInterface, scratchLocation);
+        }
+
+        jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+        jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+
+        if (jobExecutionContext.getHostPrioritizedDataMovementInterfaces() != null) {
+            jobExecutionContext.setPreferredDataMovementInterface(jobExecutionContext.getHostPrioritizedDataMovementInterfaces().get(0));
+            jobExecutionContext.setPreferredDataMovementProtocol(jobExecutionContext.getPreferredDataMovementInterface().getDataMovementProtocol());
         }
-        return jobExecutionContext;
     }
 
     private boolean submitJob(JobExecutionContext jobExecutionContext) throws GFacException {
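
The hunks above also show the gateway-preference fallback used throughout this refactoring: take the gateway's preferred protocol when one is configured, otherwise fall back to the first host-prioritized interface. Below is a minimal sketch of that selection pattern; the accessor and model type names are taken from the diffs in this thread, while the class and method names are illustrative only and not part of the commit.

    import java.util.List;
    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
    import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;

    public class DataMovementSelectionSketch {
        // Pick the interface matching the gateway preference, or fall back to the host default.
        static DataMovementInterface pickDataMovementInterface(JobExecutionContext context,
                                                               DataMovementProtocol preferred,
                                                               List<DataMovementInterface> interfaces) {
            if (preferred != null) {
                for (DataMovementInterface candidate : interfaces) {
                    if (preferred == candidate.getDataMovementProtocol()) {
                        return candidate;
                    }
                }
            }
            // No preference, or the preferred protocol is not offered by this resource.
            return context.getHostPrioritizedDataMovementInterfaces().get(0);
        }
    }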

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
index b87f99a..b2790c9 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
@@ -77,12 +77,11 @@ public class GSISSHDirectorySetupHandler extends AbstractRecoverableHandler {
         } else {
             log.info("Successfully retrieved the Security Context");
         }
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
 
-            String workingDirectory = app.getScratchWorkingDirectory();
+            String workingDirectory = jobExecutionContext.getWorkingDir();
             cluster.makeDirectory(workingDirectory);
-            cluster.makeDirectory(app.getInputDataDirectory());
-            cluster.makeDirectory(app.getOutputDataDirectory());
+            cluster.makeDirectory(jobExecutionContext.getInputDir());
+            cluster.makeDirectory(jobExecutionContext.getOutputDir());
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();
             status.setTransferState(TransferState.DIRECTORY_SETUP);
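
With this change the handler derives every remote path from the JobExecutionContext instead of the old application deployment descriptor. A condensed sketch of the resulting pattern follows; it assumes Cluster.makeDirectory throws SSHApiException, as the Cluster API imports elsewhere in this thread suggest, and the helper class name is illustrative.

    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gsi.ssh.api.Cluster;
    import org.apache.airavata.gsi.ssh.api.SSHApiException;

    class DirectorySetupSketch {
        // Create the working, input and output directories straight from the context.
        static void setupRemoteDirectories(Cluster cluster, JobExecutionContext context) throws SSHApiException {
            cluster.makeDirectory(context.getWorkingDir());
            cluster.makeDirectory(context.getInputDir());
            cluster.makeDirectory(context.getOutputDir());
        }
    }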

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index 5665b5b..b882be6 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@ -27,17 +27,18 @@ import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.AbstractRecoverableHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.model.workspace.experiment.*;
+import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.ErrorCategory;
+import org.apache.airavata.model.workspace.experiment.TransferState;
+import org.apache.airavata.model.workspace.experiment.TransferStatus;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
 import org.apache.airavata.schemas.gfac.URIParameterType;
 import org.slf4j.Logger;
@@ -45,7 +46,11 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
 
 /**
  * Recoverability for this handler assumes the same input values will come in the second
@@ -171,11 +176,10 @@ public class GSISSHInputHandler extends AbstractRecoverableHandler {
     }
 
     private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
-            String targetFile = app.getInputDataDirectory() + File.separator + substring;
+            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
             if (paramValue.startsWith("file")) {
                 paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
             }
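
The staging target is now built from the context's input directory, and the incoming value may carry a file: scheme prefix that has to be stripped before the file name is extracted. A self-contained sketch of that path handling (class and method names are illustrative):

    import java.io.File;

    class InputPathSketch {
        // Build the remote target path for an input file, mirroring stageInputFiles above.
        static String remoteTargetPath(String inputDir, String paramValue) {
            String localPath = paramValue.startsWith("file")
                    ? paramValue.substring(paramValue.indexOf(":") + 1)
                    : paramValue;
            String fileName = localPath.substring(localPath.lastIndexOf(File.separator) + 1);
            return inputDir + File.separator + fileName;
        }
    }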

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
index ac9bf3c..a714099 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
@@ -27,6 +27,7 @@ import java.util.*;
 import net.schmizz.sshj.connection.ConnectionException;
 import net.schmizz.sshj.transport.TransportException;
 
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.Constants;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
@@ -46,6 +47,10 @@ import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
 import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -67,36 +72,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
         int oldIndex = 0;
         List<String> oldFiles = new ArrayList<String>();
         StringBuffer data = new StringBuffer("|");
-        if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-            // so attempting to get it from the registry
-            if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) {
-                log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                TaskDetails taskData = null;
-                try {
-                    taskData = (TaskDetails) jobExecutionContext.getRegistry().get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                } catch (RegistryException e) {
-                    log.error("Error retrieving job details from Registry");
-                    throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                }
-                JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                String jobDescription = jobDetails.getJobDescription();
-                if (jobDescription != null) {
-                    JobDescriptor jobDescriptor = null;
-                    try {
-                        jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                    } catch (XmlException e1) {
-                        e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                    }
-                    applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                            jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                    applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                    applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                    applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                    applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                }
-            }
-        }
         try {
             if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) == null) {
 
@@ -114,8 +89,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
 
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
         Cluster cluster = null;
         
         try {
@@ -174,7 +147,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
                 while(stdOutStr.isEmpty()){
                 try {
-                	cluster.scpFrom(app.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                	cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
                 	stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
 				} catch (Exception e) {
 					log.error(e.getLocalizedMessage());
@@ -192,7 +165,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 data.append(oldFiles.get(index++)).append(",");
             } else {
                 localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
-                cluster.scpFrom(app.getStandardError(), localStdErrFile.getAbsolutePath());
+                cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
                 StringBuffer temp = new StringBuffer(data.append(localStdErrFile.getAbsolutePath()).append(",").toString());
                 GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
             }
@@ -219,7 +192,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                     List<String> outputList = null;
                     int retry=3;
                     while(retry>0){
-                    	 outputList = cluster.listDirectory(app.getOutputDataDirectory());
+                    	 outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
                         if (outputList.size() == 1 && outputList.get(0).isEmpty()) {
                             Thread.sleep(10000);
                         } else if (outputList.size() > 0) {
@@ -229,7 +202,6 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                         }
                         retry--;
                         if(retry==0){
-//                            log.info("Ohhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shitttttttOhhhhhhh shittttttt");
                         }
                     	 Thread.sleep(10000);
                     }
@@ -269,7 +241,7 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                             outputFile = oldFiles.get(index);
                             data.append(oldFiles.get(index++)).append(",");
                         } else {
-                            cluster.scpFrom(app.getOutputDataDirectory() + File.separator + valueList, outputDataDir);
+                            cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                             outputFile = outputDataDir + File.separator + valueList;
                             jobExecutionContext.addOutputFile(outputFile);
                             StringBuffer temp = new StringBuffer(data.append(outputFile).append(",").toString());
@@ -296,9 +268,10 @@ public class GSISSHOutputHandler extends AbstractRecoverableHandler {
                 );
                 }
             }
-            app.setStandardError(localStdErrFile.getAbsolutePath());
-            app.setStandardOutput(localStdOutFile.getAbsolutePath());
-            app.setOutputDataDirectory(outputDataDir);
+            // Why do we set the following?
+//            app.setStandardError(localStdErrFile.getAbsolutePath());
+//            app.setStandardOutput(localStdOutFile.getAbsolutePath());
+//            app.setOutputDataDirectory(outputDataDir);
             status.setTransferState(TransferState.DOWNLOAD);
             detail.setTransferStatus(status);
             detail.setTransferDescription(outputDataDir);
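
The handler polls the remote output directory a few times, sleeping between attempts, before it gives up. A compact sketch of that bounded-retry idea is below; the checked exception thrown by listDirectory is not spelled out here, so the sketch simply declares throws Exception, and the class and method names are illustrative.

    import java.util.List;
    import org.apache.airavata.gsi.ssh.api.Cluster;

    class OutputPollingSketch {
        // Poll the remote output directory a bounded number of times before returning.
        static List<String> waitForOutputs(Cluster cluster, String outputDir) throws Exception {
            List<String> outputs = null;
            for (int retry = 3; retry > 0; retry--) {
                outputs = cluster.listDirectory(outputDir);
                boolean empty = outputs == null || outputs.isEmpty()
                        || (outputs.size() == 1 && outputs.get(0).isEmpty());
                if (!empty) {
                    return outputs;        // something useful showed up
                }
                Thread.sleep(10000);       // wait 10 seconds before the next poll
            }
            return outputs;                // caller decides how to treat an empty listing
        }
    }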

http://git-wip-us.apache.org/repos/asf/airavata/blob/ad2b1d3a/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
----------------------------------------------------------------------
diff --git a/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java b/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
index 9a0639b..9b7102b 100644
--- a/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
+++ b/tools/gsissh/src/main/java/org/apache/airavata/gsi/ssh/api/job/JobDescriptor.java
@@ -60,6 +60,13 @@ public class JobDescriptor {
         return this.jobDescriptionDocument;
     }
 
+    /**
+     * With the new app catalog Thrift object integration, this method is no longer used.
+     * @param xml
+     * @return
+     * @throws XmlException
+     */
+    @Deprecated
     public static JobDescriptor fromXML(String xml)
             throws XmlException {
         JobDescriptorDocument parse = JobDescriptorDocument.Factory


[33/50] [abbrv] airavata git commit: Removed local test class

Posted by ch...@apache.org.
Removed local test class


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d7f35e18
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d7f35e18
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d7f35e18

Branch: refs/heads/master
Commit: d7f35e18d1735e6709b859a1d692f16d144aaedd
Parents: 198de99
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 15:52:45 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 15:52:45 2014 -0500

----------------------------------------------------------------------
 .../CreateLaunchExperimentForLocalhost.java     | 269 -------------------
 1 file changed, 269 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d7f35e18/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
deleted file mode 100644
index dcd684c..0000000
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
+++ /dev/null
@@ -1,269 +0,0 @@
-package org.apache.airavata.client.samples;
-
-import org.apache.airavata.api.Airavata;
-import org.apache.airavata.api.client.AiravataClientFactory;
-import org.apache.airavata.client.tools.RegisterSampleApplicationsUtils;
-import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
-import org.apache.airavata.model.appcatalog.appinterface.DataType;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
-import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
-import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
-import org.apache.airavata.model.error.AiravataClientConnectException;
-import org.apache.airavata.model.error.AiravataClientException;
-import org.apache.thrift.TException;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Created by shameera on 9/30/14.
- */
-public class CreateLaunchExperimentForLocalhost {
-
-    private static final String THRIFT_SERVER_HOST = "127.0.0.1";
-    private static final int THRIFT_SERVER_PORT = 8930;
-    private static final String DEFAULT_GATEWAY = "Sample";
-
-    private Airavata.Client airavataClient;
-    private String localhostId;
-    private String echoModuleId;
-    private String addModuleId;
-    private String multiplyModuleId;
-    private String subtractModuleId;
-
-    public static void main(String[] args) throws AiravataClientConnectException, TException {
-        CreateLaunchExperimentForLocalhost worker = new CreateLaunchExperimentForLocalhost();
-        worker.register();
-    }
-
-
-    public void register() throws AiravataClientConnectException, TException {
-        airavataClient = AiravataClientFactory.createAiravataClient(THRIFT_SERVER_HOST, THRIFT_SERVER_PORT);
-
-        registerLocalhost();
-//        registerGatewayProfile();
-        registerApplicationModules();
-        registerApplicationDeployments();
-        registerApplicationInterfaces();
-    }
-
-    private void registerGatewayProfile() throws TException {
-        ComputeResourcePreference localhostResourcePreference = RegisterSampleApplicationsUtils.
-             createComputeResourcePreference("localhost", "test", false, null, null, null,
-                "/Users/shameera/work/source/git_airavata/modules/distribution/server/target/apache-airavata-server-0.14-SNAPSHOT/tmp");
-        GatewayResourceProfile gatewayResourceProfile = new GatewayResourceProfile();
-        gatewayResourceProfile.setGatewayID(DEFAULT_GATEWAY);
-        gatewayResourceProfile.setGatewayName(DEFAULT_GATEWAY);
-        gatewayResourceProfile.addToComputeResourcePreferences(localhostResourcePreference);
-        airavataClient.registerGatewayResourceProfile(gatewayResourceProfile);
-    }
-
-    private void registerLocalhost() {
-//        try {
-//            System.out.println("\n #### Registering Localhost Computational Resource #### \n");
-//
-//            ComputeResourceDescription computeResourceDescription = RegisterSampleApplicationsUtils.
-//                    createComputeResourceDescription("localhost", "LocalHost", null, null);
-//            localhostId = airavataClient.registerComputeResource(computeResourceDescription);
-//            ResourceJobManager resourceJobManager = RegisterSampleApplicationsUtils.
-//                    createResourceJobManager(ResourceJobManagerType.FORK, null, null, null);
-//            LOCALSubmission submission = new LOCALSubmission();
-//            submission.setResourceJobManager(resourceJobManager);
-//            boolean localSubmission = airavataClient.addLocalSubmissionDetails(localhostId, 1, submission);
-//            if (!localSubmission) throw new AiravataClientException();
-//            System.out.println("LocalHost Resource Id is " + localhostId);
-//
-//        } catch (TException e) {
-//            e.printStackTrace();
-//        }
-    }
-
-    private void registerApplicationInterfaces() {
-         registerAddApplicationInterface();
-        registerSubtractApplicationInterface();
-        registerMultiplyApplicationInterface();
-        registerEchoInterface();
-    }
-
-    private void registerApplicationDeployments() throws TException {
-        System.out.println("#### Registering Application Deployments on Localhost #### \n");
-        //Register Echo
-        String echoAppDeployId = airavataClient.registerApplicationDeployment(
-                RegisterSampleApplicationsUtils.createApplicationDeployment(echoModuleId, localhostId,
-                        "/Users/shameera/work/tryout/scripts/echo.sh", ApplicationParallelismType.SERIAL, "Echo application description"));
-        System.out.println("Echo on localhost Id " + echoAppDeployId);
-
-        //Register Add application
-        String addAppDeployId = airavataClient.registerApplicationDeployment(
-                RegisterSampleApplicationsUtils.createApplicationDeployment(addModuleId, localhostId,
-                        "/Users/shameera/work/tryout/scripts/add.sh", ApplicationParallelismType.SERIAL, "Add application description"));
-        System.out.println("Add on localhost Id " + addAppDeployId);
-
-        //Register Multiply application
-        String multiplyAppDeployId = airavataClient.registerApplicationDeployment(
-                RegisterSampleApplicationsUtils.createApplicationDeployment(multiplyModuleId, localhostId,
-                        "/Users/shameera/work/tryout/scripts/multiply.sh", ApplicationParallelismType.SERIAL, "Multiply application description"));
-        System.out.println("Echo on localhost Id " + multiplyAppDeployId);
-
-        //Register Subtract application
-        String subtractAppDeployId = airavataClient.registerApplicationDeployment(
-                RegisterSampleApplicationsUtils.createApplicationDeployment(subtractModuleId, localhostId,
-                        "/Users/shameera/work/tryout/scripts/subtract.sh", ApplicationParallelismType.SERIAL, "Subtract application description "));
-        System.out.println("Echo on localhost Id " + subtractAppDeployId);
-    }
-
-    private void registerApplicationModules() throws TException {
-        //Register Echo
-        echoModuleId = airavataClient.registerApplicationModule(
-                RegisterSampleApplicationsUtils.createApplicationModule(
-                        "Echo", "1.0", "Echo application description"));
-        System.out.println("Echo Module Id " + echoModuleId);
-        //Register Echo
-        addModuleId = airavataClient.registerApplicationModule(
-                RegisterSampleApplicationsUtils.createApplicationModule(
-                        "Add", "1.0", "Add application description"));
-        System.out.println("Add Module Id " + addModuleId);
-        //Register Echo
-        multiplyModuleId = airavataClient.registerApplicationModule(
-                RegisterSampleApplicationsUtils.createApplicationModule(
-                        "Multiply", "1.0", "Multiply application description"));
-        System.out.println("Multiply Module Id " + multiplyModuleId);
-        //Register Echo
-        subtractModuleId = airavataClient.registerApplicationModule(
-                RegisterSampleApplicationsUtils.createApplicationModule(
-                        "Subtract", "1.0", "Subtract application description"));
-        System.out.println("Subtract Module Id " + subtractModuleId);
-
-    }
-
-
-    public void registerEchoInterface() {
-        try {
-            System.out.println("#### Registering Echo Interface #### \n");
-
-            List<String> appModules = new ArrayList<String>();
-            appModules.add(echoModuleId);
-
-            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("Input_to_Echo", "Hello World",
-                    DataType.STRING, null, false, "A test string to Echo", null);
-
-            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
-            applicationInputs.add(input1);
-
-            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Echoed_Output",
-                    "", DataType.STRING);
-
-            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
-            applicationOutputs.add(output1);
-
-            String echoInterfaceId = airavataClient.registerApplicationInterface(
-                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Echo", "Echo application description",
-                            appModules, applicationInputs, applicationOutputs));
-            System.out.println("Echo Application Interface Id " + echoInterfaceId);
-
-        } catch (TException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public void registerAddApplicationInterface() {
-        try {
-            System.out.println("#### Registering Add Application Interface #### \n");
-
-            List<String> appModules = new ArrayList<String>();
-            appModules.add(addModuleId);
-
-            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "2",
-                    DataType.STRING, null, false, "Add operation input_1", null);
-            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "3",
-                    DataType.STRING, null, false, "Add operation input_2", null);
-
-            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
-            applicationInputs.add(input1);
-            applicationInputs.add(input2);
-
-            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
-                    "0", DataType.STRING);
-
-            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
-            applicationOutputs.add(output1);
-
-            String addApplicationInterfaceId = airavataClient.registerApplicationInterface(
-                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Add", "Add two numbers",
-                            appModules, applicationInputs, applicationOutputs));
-            System.out.println("Add Application Interface Id " + addApplicationInterfaceId);
-
-        } catch (TException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public void registerMultiplyApplicationInterface() {
-        try {
-            System.out.println("#### Registering Multiply Application Interface #### \n");
-
-            List<String> appModules = new ArrayList<String>();
-            appModules.add(multiplyModuleId);
-
-            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "4",
-                    DataType.STRING, null, false, "Multiply operation input_1", null);
-            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "5",
-                    DataType.STRING, null, false, "Multiply operation input_2", null);
-
-            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
-            applicationInputs.add(input1);
-            applicationInputs.add(input2);
-
-            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
-                    "0", DataType.STRING);
-
-            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
-            applicationOutputs.add(output1);
-
-            String multiplyApplicationInterfaceId = airavataClient.registerApplicationInterface(
-                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Multiply", "Multiply two numbers",
-                            appModules, applicationInputs, applicationOutputs));
-            System.out.println("Multiply Application Interface Id " + multiplyApplicationInterfaceId);
-
-        } catch (TException e) {
-            e.printStackTrace();
-        }
-    }
-
-    public void registerSubtractApplicationInterface() {
-        try {
-            System.out.println("#### Registering Subtract Application Interface #### \n");
-
-            List<String> appModules = new ArrayList<String>();
-            appModules.add(subtractModuleId);
-
-            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "6",
-                    DataType.STRING, null, false, "Subtract operation input_1", null);
-            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "7",
-                    DataType.STRING, null, false, "Subtract operation input_2", null);
-
-            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
-            applicationInputs.add(input1);
-            applicationInputs.add(input2);
-
-            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
-                    "0", DataType.STRING);
-
-            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
-            applicationOutputs.add(output1);
-
-            String subtractApplicationInterfaceId = airavataClient.registerApplicationInterface(
-                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Subtract", "Subtract two numbers",
-                            appModules, applicationInputs, applicationOutputs));
-            System.out.println("Subtract Application Interface Id " + subtractApplicationInterfaceId);
-
-        } catch (TException e) {
-            e.printStackTrace();
-        }
-    }
-}


[20/50] [abbrv] airavata git commit: Integrated appCatalog for SSH and GSI modules; commented out old test classes, which still need to be fixed

Posted by ch...@apache.org.
Integrated appCatalog for SSH and GSI modules; commented out old test classes, which still need to be fixed


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d94e8c95
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d94e8c95
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d94e8c95

Branch: refs/heads/master
Commit: d94e8c955763243f7b36b5151fd0a27aff90e0f6
Parents: 5a28f74
Author: shamrath <sh...@gmail.com>
Authored: Tue Nov 4 12:32:09 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:53:12 2014 -0500

----------------------------------------------------------------------
 .../org/apache/airavata/gfac/Scheduler.java     |   6 +-
 .../gfac/core/context/JobExecutionContext.java  |  31 +-
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   3 +-
 .../gfac/gram/handler/GridFTPOutputHandler.java |   2 +-
 .../gfac/gsissh/util/GFACGSISSHUtils.java       |  58 +--
 .../impl/GSISSHProviderTestWithMyProxyAuth.java | 465 +++++++++--------
 .../ssh/handler/AdvancedSCPOutputHandler.java   |  16 +
 .../ssh/handler/SSHDirectorySetupHandler.java   |   7 +-
 .../gfac/ssh/handler/SSHInputHandler.java       |   3 +-
 .../gfac/ssh/handler/SSHOutputHandler.java      | 142 +++---
 .../gfac/ssh/provider/impl/SSHProvider.java     |  69 +--
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    | 300 +++++------
 .../services/impl/BigRed2TestWithSSHAuth.java   | 504 +++++++++----------
 .../impl/SSHProviderTestWithSSHAuth.java        | 342 ++++++-------
 14 files changed, 909 insertions(+), 1039 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 9e642fe..2bd612c 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -60,9 +60,9 @@ public class Scheduler {
         jobExecutionContext.setProvider(getProvider(jobExecutionContext));
         // TODO: Selecting the provider based on application description.
         jobExecutionContext.getGFacConfiguration().setInHandlers(jobExecutionContext.getProvider().getClass().getName(),
-                jobExecutionContext.getServiceName());
+                jobExecutionContext.getApplicationName());
         jobExecutionContext.getGFacConfiguration().setOutHandlers(jobExecutionContext.getProvider().getClass().getName(),
-        		 jobExecutionContext.getServiceName());
+        		 jobExecutionContext.getApplicationName());
         jobExecutionContext.getGFacConfiguration().setExecutionMode(getExecutionMode(jobExecutionContext));
     }
 
@@ -72,7 +72,7 @@ public class Scheduler {
      * @return GFacProvider instance.
      */
     private static GFacProvider getProvider(JobExecutionContext jobExecutionContext) throws GFacException {
-        String applicationName = jobExecutionContext.getServiceName();
+        String applicationName = jobExecutionContext.getApplicationName();
 
         URL resource = Scheduler.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
         DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index dcae96a..2d1a975 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -139,7 +139,7 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     // a scientific application(or algorithm) as a service. Service name is there to identify to
     // which service description we should refer during the execution of the current job represented
     // by this context instance.
-    private String serviceName;
+    private String applicationName;
 
     private String experimentID;
 
@@ -166,10 +166,10 @@ public class JobExecutionContext extends AbstractContext implements Serializable
      */
     private Map<String, SecurityContext> securityContext = new HashMap<String, SecurityContext>();
 
-    public JobExecutionContext(GFacConfiguration gFacConfiguration,String serviceName){
+    public JobExecutionContext(GFacConfiguration gFacConfiguration,String applicationName){
         this.gfacConfiguration = gFacConfiguration;
         notifier = new GFacNotifier();
-        setServiceName(serviceName);
+        setApplicationName(applicationName);
         outputFileList = new ArrayList<String>();
     }
 
@@ -238,12 +238,12 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.outHandlers = outHandlers;
     }
 
-    public String getServiceName() {
-        return serviceName;
+    public String getApplicationName() {
+        return applicationName;
     }
 
-    public void setServiceName(String serviceName) {
-        this.serviceName = serviceName;
+    public void setApplicationName(String applicationName) {
+        this.applicationName = applicationName;
     }
 
     public GFacNotifier getNotifier() {
@@ -274,15 +274,6 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.inPath = false;
     }
 
-//    public ContextHeaderDocument.ContextHeader getContextHeader() {
-//        return contextHeader;
-//    }
-//
-//    public void setContextHeader(ContextHeaderDocument.ContextHeader contextHeader) {
-//        this.contextHeader = contextHeader;
-//    }
-
-	
 	public SecurityContext getSecurityContext(String name) throws GFacException{
 		SecurityContext secContext = securityContext.get(name+"-"+this.getApplicationContext().getHostDescription().getType().getHostAddress());
 		return secContext;
@@ -459,4 +450,12 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setPreferredDataMovementInterface(DataMovementInterface preferredDataMovementInterface) {
         this.preferredDataMovementInterface = preferredDataMovementInterface;
     }
+
+    public String getExecutablePath() {
+        if (applicationContext == null || applicationContext.getApplicationDeploymentDescription() == null) {
+            return null;
+        } else {
+            return applicationContext.getApplicationDeploymentDescription().getExecutablePath();
+        }
+    }
 }
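
getExecutablePath returns null when no application deployment description is attached, so callers should guard before wiring the value into a job descriptor. A short usage sketch follows; the single-String GFacException constructor and the helper name are assumptions, not part of the commit.

    import org.apache.airavata.gfac.GFacException;
    import org.apache.airavata.gfac.core.context.JobExecutionContext;

    class ExecutablePathSketch {
        // Fail fast when no deployment description (and hence no executable path) is available.
        static String requireExecutablePath(JobExecutionContext context) throws GFacException {
            String executablePath = context.getExecutablePath();
            if (executablePath == null) {
                // Message is illustrative; adjust to the project's error-reporting conventions.
                throw new GFacException("No application deployment description set; cannot resolve executable path");
            }
            return executablePath;
        }
    }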

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 656a291..0455f7e 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -269,7 +269,7 @@ public class BetterGfacImpl implements GFac,Watcher {
         GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), configurationProperties);
 
         // start constructing jobexecutioncontext
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, applicationInterfaceId);
+        jobExecutionContext = new JobExecutionContext(gFacConfiguration, applicationInterface.getApplicationName());
 
         // setting experiment/task/workflownode related information
         Experiment experiment = (Experiment) registry.get(RegistryModelType.EXPERIMENT, experimentID);
@@ -281,6 +281,7 @@ public class BetterGfacImpl implements GFac,Watcher {
 
 
         List<JobDetails> jobDetailsList = taskData.getJobDetailsList();
+        //FIXME: The following for loop only sets the last jobDetails element on the jobExecutionContext
         for(JobDetails jDetails:jobDetailsList){
             jobExecutionContext.setJobDetails(jDetails);
         }
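
The FIXME added above notes that the loop only ever keeps the last element. A sketch of the more direct equivalent it hints at, reusing the jobDetailsList and jobExecutionContext locals from the surrounding hunk:

    // Take the most recent JobDetails explicitly instead of looping over the whole list.
    if (jobDetailsList != null && !jobDetailsList.isEmpty()) {
        jobExecutionContext.setJobDetails(jobDetailsList.get(jobDetailsList.size() - 1));
    }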

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
index a424da0..7e226ea 100644
--- a/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
+++ b/modules/gfac/gfac-gram/src/main/java/org/apache/airavata/gfac/gram/handler/GridFTPOutputHandler.java
@@ -133,7 +133,7 @@ public class GridFTPOutputHandler extends AbstractHandler {
                     }
 
                     String timeStampedServiceName = GFacUtils.createUniqueNameWithDate(jobExecutionContext
-                            .getServiceName());
+                            .getApplicationName());
                     File localStdOutFile = File.createTempFile(timeStampedServiceName, "stdout");
                     localStdErrFile = File.createTempFile(timeStampedServiceName, "stderr");
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index baca65c..0a521b5 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -42,7 +42,6 @@ import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.ServerInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
-import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
@@ -50,21 +49,13 @@ import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterfa
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
-import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
 import org.apache.airavata.schemas.gfac.FileArrayType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
 import org.apache.airavata.schemas.gfac.StringArrayType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
+import java.util.*;
 
 
 public class GFACGSISSHUtils {
@@ -181,7 +172,7 @@ public class GFACGSISSHUtils {
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
         jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
         jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
-        jobDescriptor.setExecutablePath(app.getExecutablePath());
+        jobDescriptor.setExecutablePath(jobExecutionContext.getExecutablePath());
         jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
         jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
@@ -214,51 +205,6 @@ public class GFACGSISSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
-        // this part will fill out the hpcApplicationDescriptor
-        if (app instanceof HpcApplicationDeploymentType) {
-            HpcApplicationDeploymentType applicationDeploymentType
-                    = (HpcApplicationDeploymentType) app;
-            jobDescriptor.setUserName(((GSISSHAbstractCluster)cluster).getServerInfo().getUserName());
-            jobDescriptor.setShellName("/bin/bash");
-            jobDescriptor.setAllEnvExport(true);
-            jobDescriptor.setMailOptions("n");
-            jobDescriptor.setNodes(applicationDeploymentType.getNodeCount());
-            jobDescriptor.setProcessesPerNode(applicationDeploymentType.getProcessorsPerNode());
-            jobDescriptor.setMaxWallTime(String.valueOf(applicationDeploymentType.getMaxWallTime()));
-            jobDescriptor.setJobSubmitter(applicationDeploymentType.getJobSubmitterCommand());
-            jobDescriptor.setCPUCount(applicationDeploymentType.getCpuCount());
-            if (applicationDeploymentType.getProjectAccount() != null) {
-                if (applicationDeploymentType.getProjectAccount().getProjectAccountNumber() != null) {
-                    jobDescriptor.setAcountString(applicationDeploymentType.getProjectAccount().getProjectAccountNumber());
-                }
-            }
-            if (applicationDeploymentType.getQueue() != null) {
-                if (applicationDeploymentType.getQueue().getQueueName() != null) {
-                    jobDescriptor.setQueueName(applicationDeploymentType.getQueue().getQueueName());
-                }
-            }
-            jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
-            TaskDetails taskData = jobExecutionContext.getTaskData();
-            if (taskData != null && taskData.isSetTaskScheduling()) {
-                ComputationalResourceScheduling computionnalResource = taskData.getTaskScheduling();
-                if (computionnalResource.getNodeCount() > 0) {
-                    jobDescriptor.setNodes(computionnalResource.getNodeCount());
-                }
-                if (computionnalResource.getComputationalProjectAccount() != null) {
-                    jobDescriptor.setAcountString(computionnalResource.getComputationalProjectAccount());
-                }
-                if (computionnalResource.getQueueName() != null) {
-                    jobDescriptor.setQueueName(computionnalResource.getQueueName());
-                }
-                if (computionnalResource.getTotalCPUCount() > 0) {
-                    jobDescriptor.setProcessesPerNode(computionnalResource.getTotalCPUCount());
-                }
-                if (computionnalResource.getWallTimeLimit() > 0) {
-                    jobDescriptor.setMaxWallTime(String.valueOf(computionnalResource.getWallTimeLimit()));
-                }
-            }
-
-        }
         return jobDescriptor;
     }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
index 0774022..630cd5c 100644
--- a/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-gsissh/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
@@ -1,236 +1,229 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.core.gfac.services.impl;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.SecurityContext;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
-import org.apache.airavata.gsi.ssh.api.Cluster;
-import org.apache.airavata.gsi.ssh.api.SSHApiException;
-import org.apache.airavata.gsi.ssh.api.ServerInfo;
-import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.impl.PBSCluster;
-import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
-import org.apache.airavata.gsi.ssh.util.CommonUtils;
-import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.StringParameterType;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-public class GSISSHProviderTestWithMyProxyAuth {
-    private JobExecutionContext jobExecutionContext;
-
-    //FIXME: move job properties to configuration file
-    private static final String hostAddress = "trestles.sdsc.edu";
-    private static final String hostName = "trestles";
-    private String myProxyUserName;
-    private String myProxyPassword;
-    private String workingDirectory;
-    private String certificateLocation = "/Users/lahirugunathilake/Downloads/certificates";
-
-    @BeforeClass
-    public void setUp() throws Exception {
-//        System.setProperty("myproxy.user", "ogce");
-//        System.setProperty("myproxy.password", "");
-//        System.setProperty("basedir", "/Users/lahirugunathilake/Downloads");
-//        System.setProperty("gsi.working.directory", "/home/ogce");
-//        System.setProperty("gsi.certificate.path", "/Users/lahirugunathilake/Downloads/certificates");
-        certificateLocation = System.getProperty("trusted.cert.location");
-        myProxyUserName = System.getProperty("myproxy.username");
-        myProxyPassword = System.getProperty("myproxy.password");
-        workingDirectory = System.getProperty("gsi.working.directory");
-
-        if (myProxyUserName == null || myProxyPassword == null || certificateLocation == null) {
-            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
-                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
-            throw new Exception("Need my proxy user name password to run tests.");
-        }
-        URL resource = GSISSHProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-//        gFacConfiguration.setMyProxyLifeCycle(3600);
-//        gFacConfiguration.setMyProxyServer("myproxy.teragrid.org");
-//        gFacConfiguration.setMyProxyUser("*****");
-//        gFacConfiguration.setMyProxyPassphrase("*****");
-//        gFacConfiguration.setTrustedCertLocation("./certificates");
-//        //have to set InFlwo Handlers and outFlowHandlers
-//        gFacConfiguration.setInHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GramDirectorySetupHandler","org.apache.airavata.gfac.handler.GridFTPInputHandler"}));
-//        gFacConfiguration.setOutHandlers(Arrays.asList(new String[] {"org.apache.airavata.gfac.handler.GridFTPOutputHandler"}));
-
-        /*
-        * Host
-        */
-        HostDescription host = new HostDescription(GsisshHostType.type);
-        host.getType().setHostAddress(hostAddress);
-        host.getType().setHostName(hostName);
-
-        /*
-        * App
-        */
-        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
-        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
-        name.setStringValue("EchoLocal");
-        app.setApplicationName(name);
-        ProjectAccountType projectAccountType = app.addNewProjectAccount();
-        projectAccountType.setProjectAccountNumber("sds128");
-
-        QueueType queueType = app.addNewQueue();
-        queueType.setQueueName("normal");
-
-        app.setCpuCount(1);
-        app.setJobType(JobTypeType.SERIAL);
-        app.setNodeCount(1);
-        app.setProcessorsPerNode(1);
-
-        /*
-        * Use bat file if it is compiled on Windows
-        */
-        app.setExecutableLocation("/bin/echo");
-
-        /*
-        * Default tmp location
-        */
-        String tempDir = "/home/ogce/scratch/";
-        String date = (new Date()).toString();
-        date = date.replaceAll(" ", "_");
-        date = date.replaceAll(":", "_");
-
-        tempDir = workingDirectory + File.separator
-                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
-
-        System.out.println(tempDir);
-        app.setScratchWorkingDirectory(tempDir);
-        app.setStaticWorkingDirectory(tempDir);
-        app.setInputDataDirectory(tempDir + File.separator + "inputData");
-        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
-        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
-        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
-        app.setMaxWallTime(5);
-        app.setInstalledParentPath("/opt/torque/bin/");
-
-        /*
-        * Service
-        */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("SimpleEcho");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input = InputParameterType.Factory.newInstance();
-        input.setParameterName("echo_input");
-        input.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
-
-                .size()]);
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("echo_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
-        // Adding security context
-        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext(app));
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(appDesc);
-        applicationContext.setHostDescription(host);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter echo_input = new ActualParameter();
-        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
-        inMessage.addParameter("echo_input", echo_input);
-
-
-        jobExecutionContext.setInMessageContext(inMessage);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-//		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
-        outMessage.addParameter("echo_output", echo_out);
-        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
-        jobExecutionContext.setTaskData(new TaskDetails("11323"));
-        jobExecutionContext.setOutMessageContext(outMessage);
-
-    }
-
-    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
-        GSIAuthenticationInfo authenticationInfo
-                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
-                7512, 17280000, certificateLocation);
-
-        // Server info
-        ServerInfo serverInfo = new ServerInfo("ogce", "trestles.sdsc.edu");
-        Cluster pbsCluster = null;
-        try {
-            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
-        } catch (SSHApiException e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-        }
-        GSISecurityContext sshSecurityContext = new GSISecurityContext(pbsCluster);
-        return sshSecurityContext;
-    }
-    @Test
-    public void testGSISSHProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        System.out.println(jobExecutionContext.getJobDetails().getJobDescription());
-        System.out.println(jobExecutionContext.getJobDetails().getJobID());
-    }
-
-}
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+//*/
+//package org.apache.airavata.core.gfac.services.impl;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.List;
+//import java.util.UUID;
+//
+//import org.apache.aiaravata.application.catalog.data.model.ApplicationInterface;
+//import org.apache.airavata.commons.gfac.type.ActualParameter;
+//import org.apache.airavata.commons.gfac.type.ApplicationDescription;
+//import org.apache.airavata.commons.gfac.type.HostDescription;
+//import org.apache.airavata.commons.gfac.type.ServiceDescription;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.SecurityContext;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
+//import org.apache.airavata.gsi.ssh.api.Cluster;
+//import org.apache.airavata.gsi.ssh.api.SSHApiException;
+//import org.apache.airavata.gsi.ssh.api.ServerInfo;
+//import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.impl.PBSCluster;
+//import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+//import org.apache.airavata.gsi.ssh.util.CommonUtils;
+//import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+//import org.apache.airavata.model.workspace.experiment.TaskDetails;
+//import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
+//import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
+//import org.apache.airavata.schemas.gfac.InputParameterType;
+//import org.apache.airavata.schemas.gfac.JobTypeType;
+//import org.apache.airavata.schemas.gfac.OutputParameterType;
+//import org.apache.airavata.schemas.gfac.ProjectAccountType;
+//import org.apache.airavata.schemas.gfac.QueueType;
+//import org.apache.airavata.schemas.gfac.StringParameterType;
+//import org.testng.annotations.BeforeClass;
+//import org.testng.annotations.Test;
+//
+//public class GSISSHProviderTestWithMyProxyAuth {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    //FIXME: move job properties to configuration file
+//    private static final String hostAddress = "trestles.sdsc.edu";
+//    private static final String hostName = "trestles";
+//    private String myProxyUserName;
+//    private String myProxyPassword;
+//    private String workingDirectory;
+//    private String certificateLocation = "/Users/lahirugunathilake/Downloads/certificates";
+//
+//    @BeforeClass
+//    public void setUp() throws Exception {
+////        System.setProperty("myproxy.user", "ogce");
+////        System.setProperty("myproxy.password", "");
+////        System.setProperty("basedir", "/Users/lahirugunathilake/Downloads");
+////        System.setProperty("gsi.working.directory", "/home/ogce");
+////        System.setProperty("gsi.certificate.path", "/Users/lahirugunathilake/Downloads/certificates");
+//        certificateLocation = System.getProperty("trusted.cert.location");
+//        myProxyUserName = System.getProperty("myproxy.username");
+//        myProxyPassword = System.getProperty("myproxy.password");
+//        workingDirectory = System.getProperty("gsi.working.directory");
+//
+//        if (myProxyUserName == null || myProxyPassword == null || certificateLocation == null) {
+//            System.out.println(">>>>>> Please run tests with my proxy user name and password. " +
+//                    "E.g :- mvn clean install -Dmyproxy.username=xxx -Dmyproxy.password=xxx -Dgsi.working.directory=/path<<<<<<<");
+//            throw new Exception("Need my proxy user name password to run tests.");
+//        }
+//        URL resource = GSISSHProviderTestWithMyProxyAuth.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+//        /*
+//        * Host
+//        */
+//        HostDescription host = new HostDescription(GsisshHostType.type);
+//        host.getType().setHostAddress(hostAddress);
+//        host.getType().setHostName(hostName);
+//
+//        /*
+//        * App
+//        */
+//        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
+//        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
+//        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
+//        name.setStringValue("EchoLocal");
+//        app.setApplicationName(name);
+//        ProjectAccountType projectAccountType = app.addNewProjectAccount();
+//        projectAccountType.setProjectAccountNumber("sds128");
+//
+//        QueueType queueType = app.addNewQueue();
+//        queueType.setQueueName("normal");
+//
+//        app.setCpuCount(1);
+//        app.setJobType(JobTypeType.SERIAL);
+//        app.setNodeCount(1);
+//        app.setProcessorsPerNode(1);
+//
+//        /*
+//        * Use bat file if it is compiled on Windows
+//        */
+//        app.setExecutableLocation("/bin/echo");
+//
+//        /*
+//        * Default tmp location
+//        */
+//        String tempDir = "/home/ogce/scratch/";
+//        String date = (new Date()).toString();
+//        date = date.replaceAll(" ", "_");
+//        date = date.replaceAll(":", "_");
+//
+//        tempDir = workingDirectory + File.separator
+//                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();
+//
+//        System.out.println(tempDir);
+//        app.setScratchWorkingDirectory(tempDir);
+//        app.setStaticWorkingDirectory(tempDir);
+//        app.setInputDataDirectory(tempDir + File.separator + "inputData");
+//        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
+//        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
+//        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");
+//        app.setMaxWallTime(5);
+//        app.setInstalledParentPath("/opt/torque/bin/");
+//
+//        /*
+//        * Service
+//        */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("SimpleEcho");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input = InputParameterType.Factory.newInstance();
+//        input.setParameterName("echo_input");
+//        input.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
+//
+//                .size()]);
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("echo_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration, serv.getType().getName());
+//        // Adding security context
+//        jobExecutionContext.addSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT, getSecurityContext(app));
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(appDesc);
+//        applicationContext.setHostDescription(host);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter echo_input = new ActualParameter();
+//        ((StringParameterType) echo_input.getType()).setValue("echo_output=hello");
+//        inMessage.addParameter("echo_input", echo_input);
+//
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+////		((StringParameterType)echo_input.getType()).setValue("echo_output=hello");
+//        outMessage.addParameter("echo_output", echo_out);
+//        jobExecutionContext.setRegistry(RegistryFactory.getLoggingRegistry());
+//        jobExecutionContext.setTaskData(new TaskDetails("11323"));
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//
+//    }
+//
+//    private SecurityContext getSecurityContext(HpcApplicationDeploymentType app) {
+//        GSIAuthenticationInfo authenticationInfo
+//                = new MyProxyAuthenticationInfo(myProxyUserName, myProxyPassword, "myproxy.teragrid.org",
+//                7512, 17280000, certificateLocation);
+//
+//        // Server info
+//        ServerInfo serverInfo = new ServerInfo("ogce", "trestles.sdsc.edu");
+//        Cluster pbsCluster = null;
+//        try {
+//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
+//        } catch (SSHApiException e) {
+//            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
+//        }
+//        GSISecurityContext sshSecurityContext = new GSISecurityContext(pbsCluster);
+//        return sshSecurityContext;
+//    }
+//    @Test
+//    public void testGSISSHProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        System.out.println(jobExecutionContext.getJobDetails().getJobDescription());
+//        System.out.println(jobExecutionContext.getJobDetails().getJobID());
+//    }
+//
+//}

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index dfd84de..f508e23 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -114,6 +114,22 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                     .getApplicationDeploymentDescription().getType();
             String standardError = app.getStandardError();
             String standardOutput = app.getStandardOutput();
+            if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
+                try {
+                    GFACSSHUtils.addSecurityContext(jobExecutionContext);
+                } catch (ApplicationSettingsException e) {
+                    log.error(e.getMessage());
+                    try {
+         				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+         			} catch (GFacException e1) {
+         				 log.error(e1.getLocalizedMessage());
+         			}
+                    throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
+                }
+            }
+            pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
+            String standardError = jobExecutionContext.getStandardError();
+            String standardOutput = jobExecutionContext.getStandardOutput();
             super.invoke(jobExecutionContext);
             // Server info
             if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().getOutputDataDir() != null){

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index 0be6820..f7cbcc0 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@ -73,11 +73,10 @@ public class SSHDirectorySetupHandler extends AbstractHandler {
         } else {
             log.info("Successfully retrieved the Security Context");
         }
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-            String workingDirectory = app.getScratchWorkingDirectory();
+            String workingDirectory = jobExecutionContext.getWorkingDir();
             cluster.makeDirectory(workingDirectory);
-            cluster.makeDirectory(app.getInputDataDirectory());
-            cluster.makeDirectory(app.getOutputDataDirectory());
+            cluster.makeDirectory(jobExecutionContext.getInputDir());
+            cluster.makeDirectory(jobExecutionContext.getOutputDir());
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();
             status.setTransferState(TransferState.DIRECTORY_SETUP);

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index b26e035..b0367f3 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@ -150,11 +150,10 @@ public class SSHInputHandler extends AbstractHandler {
     }
 
     private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
-            String targetFile = app.getInputDataDirectory() + File.separator + substring;
+            String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
             if(paramValue.startsWith("scp:")){
             	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
             	cluster.scpThirdParty(paramValue, targetFile);

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index 328ad32..d80e92b 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@ -27,6 +27,8 @@ import java.util.*;
 import net.schmizz.sshj.connection.ConnectionException;
 import net.schmizz.sshj.transport.TransportException;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.Constants;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
@@ -44,6 +46,9 @@ import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.registry.cpi.ChildDataType;
 import org.apache.airavata.registry.cpi.RegistryModelType;
@@ -58,38 +63,6 @@ public class SSHOutputHandler extends AbstractHandler {
     private static final Logger log = LoggerFactory.getLogger(SSHOutputHandler.class);
 
     public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        if (jobExecutionContext.getApplicationContext().getHostDescription().getType() instanceof GsisshHostType) { // this is because we don't have the right jobexecution context
-            // so attempting to get it from the registry
-            if (Constants.PUSH.equals(((GsisshHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getMonitorMode())) { // this is because we don't have the right jobexecution context
-                // so attempting to get it from the registry
-                log.warn("During the out handler chain jobExecution context came null, so trying to handler");
-                ApplicationDescription applicationDeploymentDescription = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-                TaskDetails taskData = null;
-                try {
-                    taskData = (TaskDetails) registry.get(RegistryModelType.TASK_DETAIL, jobExecutionContext.getTaskData().getTaskID());
-                } catch (RegistryException e) {
-                    log.error("Error retrieving job details from Registry");
-                    throw new GFacHandlerException("Error retrieving job details from Registry", e);
-                }
-                JobDetails jobDetails = taskData.getJobDetailsList().get(0);
-                String jobDescription = jobDetails.getJobDescription();
-                if (jobDescription != null) {
-                    JobDescriptor jobDescriptor = null;
-                    try {
-                        jobDescriptor = JobDescriptor.fromXML(jobDescription);
-                    } catch (XmlException e1) {
-                        e1.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-                    }
-                    applicationDeploymentDescription.getType().setScratchWorkingDirectory(
-                            jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getWorkingDirectory());
-                    applicationDeploymentDescription.getType().setInputDataDirectory(jobDescriptor.getInputDirectory());
-                    applicationDeploymentDescription.getType().setOutputDataDirectory(jobDescriptor.getOutputDirectory());
-                    applicationDeploymentDescription.getType().setStandardError(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardErrorFile());
-                    applicationDeploymentDescription.getType().setStandardOutput(jobDescriptor.getJobDescriptorDocument().getJobDescriptor().getStandardOutFile());
-                }
-            }
-        }
-
         try {
             if (jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT) == null) {
 
@@ -98,10 +71,10 @@ public class SSHOutputHandler extends AbstractHandler {
         } catch (Exception e) {
             log.error(e.getMessage());
             try {
- 				GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
- 			} catch (GFacException e1) {
- 				 log.error(e1.getLocalizedMessage());
- 			}
+                GFacUtils.saveErrorDetails(jobExecutionContext, e.getLocalizedMessage(), CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
+            } catch (GFacException e1) {
+                log.error(e1.getLocalizedMessage());
+            }
             throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
         }
 
@@ -109,11 +82,9 @@ public class SSHOutputHandler extends AbstractHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
 
-        ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
         Cluster cluster = null;
         try {
-             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
+            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
             if (cluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
@@ -143,19 +114,19 @@ public class SSHOutputHandler extends AbstractHandler {
 //            cluster.makeDirectory(outputDataDir);
             int i = 0;
             String stdOutStr = "";
-            while(stdOutStr.isEmpty()){ 		
-            try {
-            	cluster.scpFrom(app.getStandardOutput(), localStdOutFile.getAbsolutePath());
-                stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
-			} catch (Exception e) {
-				log.error(e.getLocalizedMessage());
-				Thread.sleep(2000);
-			}
-            i++;
-            if(i == 3) break;
+            while (stdOutStr.isEmpty()) {
+                try {
+                    cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                    stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
+                } catch (Exception e) {
+                    log.error(e.getLocalizedMessage());
+                    Thread.sleep(2000);
+                }
+                i++;
+                if (i == 3) break;
             }
             Thread.sleep(1000);
-            cluster.scpFrom(app.getStandardError(), localStdErrFile.getAbsolutePath());
+            cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
             Thread.sleep(1000);
 
             String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
@@ -177,72 +148,73 @@ public class SSHOutputHandler extends AbstractHandler {
                 ActualParameter actualParameter = (ActualParameter) output.get(paramName);
                 if ("URI".equals(actualParameter.getType().getType().toString())) {
                     List<String> outputList = null;
-                    int retry=3;
-                    while(retry>0){
-                    	 outputList = cluster.listDirectory(app.getOutputDataDirectory());
-                    	 if(outputList.size() > 0){
-                    		 break;
-                    	 }	
-                    	 retry--;
-                    	 Thread.sleep(2000);
+                    int retry = 3;
+                    while (retry > 0) {
+                        outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
+                        if (outputList.size() > 0) {
+                            break;
+                        }
+                        retry--;
+                        Thread.sleep(2000);
                     }
-                  
+
                     if (outputList.size() == 0 || outputList.get(0).isEmpty() || outputList.size() > 0) {
-                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr,outputArray);
+                        OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                         Set<String> strings = output.keySet();
                         outputArray.clear();
                         for (String key : strings) {
                             ActualParameter actualParameter1 = (ActualParameter) output.get(key);
                             if ("URI".equals(actualParameter1.getType().getType().toString())) {
-                              	String downloadFile = MappingFactory.toString(actualParameter1);
-                            	cluster.scpFrom(downloadFile, outputDataDir);
-                            	String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
-                            	String localFile = outputDataDir +  File.separator +fileName;
-								jobExecutionContext.addOutputFile(localFile);
-								MappingFactory.fromString(actualParameter1, localFile);
-								DataObjectType dataObjectType = new DataObjectType();
+                                String downloadFile = MappingFactory.toString(actualParameter1);
+                                cluster.scpFrom(downloadFile, outputDataDir);
+                                String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
+                                String localFile = outputDataDir + File.separator + fileName;
+                                jobExecutionContext.addOutputFile(localFile);
+                                MappingFactory.fromString(actualParameter1, localFile);
+                                DataObjectType dataObjectType = new DataObjectType();
                                 dataObjectType.setValue(localFile);
                                 dataObjectType.setKey(key);
                                 dataObjectType.setType(DataType.URI);
                                 outputArray.add(dataObjectType);
                             }
                         }
-                    
+
                         break;
-                    } else if( outputList.size() == 0) {//FIXME: Ultrascan case
+                    } else if (outputList.size() == 0) {//FIXME: Ultrascan case
                         String valueList = outputList.get(0);
-                        cluster.scpFrom(app.getOutputDataDirectory() + File.separator + valueList, outputDataDir);
+                        cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                         String outputPath = outputDataDir + File.separator + valueList;
-						jobExecutionContext.addOutputFile(outputPath);
-						MappingFactory.fromString(actualParameter, outputPath);
-						DataObjectType dataObjectType = new DataObjectType();
+                        jobExecutionContext.addOutputFile(outputPath);
+                        MappingFactory.fromString(actualParameter, outputPath);
+                        DataObjectType dataObjectType = new DataObjectType();
                         dataObjectType.setValue(outputPath);
                         dataObjectType.setKey(paramName);
                         dataObjectType.setType(DataType.URI);
                         outputArray.add(dataObjectType);
                     }
                 } else {
-                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr,outputArray);
+                    OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr, outputArray);
                 }
             }
             if (outputArray == null || outputArray.isEmpty()) {
-            	log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
-            	if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null){
-            		throw new GFacHandlerException(
-                        "Empty Output returned from the Application, Double check the application"
-                                + "and ApplicationDescriptor output Parameter Names");
-            	}
+                log.error("Empty Output returned from the Application, Double check the application and ApplicationDescriptor output Parameter Names");
+                if (jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() == null) {
+                    throw new GFacHandlerException(
+                            "Empty Output returned from the Application, Double check the application"
+                                    + "and ApplicationDescriptor output Parameter Names");
+                }
             }
-            app.setStandardError(localStdErrFile.getAbsolutePath());
-            app.setStandardOutput(localStdOutFile.getAbsolutePath());
-            app.setOutputDataDirectory(outputDataDir);
+            // FIXME: why we set standard error ouput and outputDirectory again ?
+//            app.setStandardError(localStdErrFile.getAbsolutePath());
+//            app.setStandardOutput(localStdOutFile.getAbsolutePath());
+//            app.setOutputDataDirectory(outputDataDir);
             status.setTransferState(TransferState.DOWNLOAD);
             detail.setTransferStatus(status);
             detail.setTransferDescription(outputDataDir);
             registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
             registry.add(ChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
-            
-        }catch (Exception e) {
+
+        } catch (Exception e) {
             try {
                 status.setTransferState(TransferState.FAILED);
                 detail.setTransferStatus(status);

http://git-wip-us.apache.org/repos/asf/airavata/blob/d94e8c95/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index 0527c78..573ddf0 100644
--- a/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-ssh/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -51,6 +51,8 @@ import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.impl.RawCommandInfo;
 import org.apache.airavata.gsi.ssh.impl.StandardOutReader;
+import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
@@ -86,16 +88,16 @@ public class SSHProvider extends AbstractProvider {
         }
         taskID = jobExecutionContext.getTaskData().getTaskID();
 
-        if (!((SSHHostType) jobExecutionContext.getApplicationContext().getHostDescription().getType()).getHpcResource()) {
-            jobID = "SSH_" + jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress() + "_" + Calendar.getInstance().getTimeInMillis();
+        JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+            jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
 
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
-            String remoteFile = app.getStaticWorkingDirectory() + File.separatorChar + Constants.EXECUTABLE_NAME;
+            String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
             details.setJobID(taskID);
             details.setJobDescription(remoteFile);
             jobExecutionContext.setJobDetails(details);
-            JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, app, null);
+            JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, null);
             details.setJobDescription(jobDescriptor.toXML());
 
             GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
@@ -114,16 +116,15 @@ public class SSHProvider extends AbstractProvider {
 
     public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         if (!hpcType) {
-            ApplicationDeploymentDescriptionType app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
             try {
                 /*
                  * Execute
                  */
-                String execuable = app.getStaticWorkingDirectory() + File.separatorChar + Constants.EXECUTABLE_NAME;
-                details.setJobDescription(execuable);
+                String executable = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
+                details.setJobDescription(executable);
 
 //                GFacUtils.updateJobStatus(details, JobState.SUBMITTED);
-                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + execuable + "; " + execuable);
+                RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
 
                 StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
 
@@ -139,10 +140,6 @@ public class SSHProvider extends AbstractProvider {
         } else {
             try {
                 jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-                HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                        getHostDescription().getType();
-                HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                        getApplicationDeploymentDescription().getType();
                 JobDetails jobDetails = new JobDetails();
                 try {
                     Cluster cluster = null;
@@ -155,7 +152,7 @@ public class SSHProvider extends AbstractProvider {
                         log.info("Successfully retrieved the Security Context");
                     }
                     // This installed path is a mandetory field, because this could change based on the computing resource
-                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, app, cluster);
+                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, cluster);
                     jobDetails.setJobName(jobDescriptor.getJobName());
                     log.info(jobDescriptor.toXML());
 
@@ -172,14 +169,14 @@ public class SSHProvider extends AbstractProvider {
                     }
                     delegateToMonitorHandlers(jobExecutionContext);
                 } catch (SSHApiException e) {
-                    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                     GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                     throw new GFacProviderException(error, e);
                 } catch (Exception e) {
-                    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                    String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                     log.error(error);
                     jobDetails.setJobID("none");
                     GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -199,8 +196,6 @@ public class SSHProvider extends AbstractProvider {
 
     public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
         JobDetails jobDetails = jobExecutionContext.getJobDetails();
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
         StringBuffer data = new StringBuffer();
         if (!hpcType) {
             throw new NotImplementedException();
@@ -225,14 +220,14 @@ public class SSHProvider extends AbstractProvider {
                 }
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
             } catch (SSHApiException e) {
-                String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                 log.error(error);
                 jobDetails.setJobID("none");
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
                 GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 throw new GFacProviderException(error, e);
             } catch (Exception e) {
-                String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+                String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
                 log.error(error);
                 jobDetails.setJobID("none");
                 GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -279,40 +274,28 @@ public class SSHProvider extends AbstractProvider {
         }
     }
     private File createShellScript(JobExecutionContext context) throws IOException {
-        ApplicationDeploymentDescriptionType app = context.getApplicationContext()
-                .getApplicationDeploymentDescription().getType();
-        String uniqueDir = app.getApplicationName().getStringValue() + System.currentTimeMillis()
+        String uniqueDir = jobExecutionContext.getApplicationName() + System.currentTimeMillis()
                 + new Random().nextLong();
 
         File shellScript = File.createTempFile(uniqueDir, "sh");
         OutputStream out = new FileOutputStream(shellScript);
 
         out.write("#!/bin/bash\n".getBytes());
-        out.write(("cd " + app.getStaticWorkingDirectory() + "\n").getBytes());
-        out.write(("export " + Constants.INPUT_DATA_DIR_VAR_NAME + "=" + app.getInputDataDirectory() + "\n").getBytes());
-        out.write(("export " + Constants.OUTPUT_DATA_DIR_VAR_NAME + "=" + app.getOutputDataDirectory() + "\n")
+        out.write(("cd " + jobExecutionContext.getWorkingDir() + "\n").getBytes());
+        out.write(("export " + Constants.INPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getInputDir() + "\n").getBytes());
+        out.write(("export " + Constants.OUTPUT_DATA_DIR_VAR_NAME + "=" + jobExecutionContext.getOutputDir() + "\n")
                 .getBytes());
         // get the env of the host and the application
-        NameValuePairType[] env = app.getApplicationEnvironmentArray();
-
-        Map<String, String> nv = new HashMap<String, String>();
-        if (env != null) {
-            for (int i = 0; i < env.length; i++) {
-                String key = env[i].getName();
-                String value = env[i].getValue();
-                nv.put(key, value);
-            }
-        }
-        for (Entry<String, String> entry : nv.entrySet()) {
-            log.debug("Env[" + entry.getKey() + "] = " + entry.getValue());
-            out.write(("export " + entry.getKey() + "=" + entry.getValue() + "\n").getBytes());
-
+        List<SetEnvPaths> envPathList = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getSetEnvironment();
+        for (SetEnvPaths setEnvPaths : envPathList) {
+            log.debug("Env[" + setEnvPaths.getName() + "] = " + setEnvPaths.getValue());
+            out.write(("export " + setEnvPaths.getName() + "=" + setEnvPaths.getValue() + "\n").getBytes());
         }
 
         // prepare the command
         final String SPACE = " ";
         StringBuffer cmd = new StringBuffer();
-        cmd.append(app.getExecutableLocation());
+        cmd.append(jobExecutionContext.getExecutablePath());
         cmd.append(SPACE);
 
         MessageContext input = context.getInMessageContext();
@@ -338,11 +321,11 @@ public class SSHProvider extends AbstractProvider {
         cmd.append(SPACE);
         cmd.append("1>");
         cmd.append(SPACE);
-        cmd.append(app.getStandardOutput());
+        cmd.append(jobExecutionContext.getStandardOutput());
         cmd.append(SPACE);
         cmd.append("2>");
         cmd.append(SPACE);
-        cmd.append(app.getStandardError());
+        cmd.append(jobExecutionContext.getStandardError());
 
         String cmdStr = cmd.toString();
         log.info("Command = " + cmdStr);


[18/50] [abbrv] airavata git commit: Removed legacy descriptions from MonitorID, GSISSH provider and utils and AMQPMonitor classes

Posted by ch...@apache.org.
Removed legacy descriptions from the MonitorID, GSISSH provider, GSISSH utils, and AMQPMonitor classes.


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/eb626fa7
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/eb626fa7
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/eb626fa7

Branch: refs/heads/master
Commit: eb626fa754d75bcb6fc328507d9ff8c3bceb4bcf
Parents: 5136157
Author: shamrath <sh...@gmail.com>
Authored: Fri Oct 31 12:25:31 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:23:05 2014 -0500

----------------------------------------------------------------------
 .../data/impl/GwyResourceProfileImpl.java       |   8 +-
 .../data/util/AppCatalogThriftConversion.java   |   4 +-
 .../app/catalog/test/GatewayProfileTest.java    |   8 +-
 .../gfac/core/context/JobExecutionContext.java  |   4 +
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |  33 +++---
 .../airavata/gfac/core/monitor/MonitorID.java   |  19 ++--
 .../gsissh/provider/impl/GSISSHProvider.java    |  64 ++++++-----
 .../gfac/gsissh/util/GFACGSISSHUtils.java       | 108 ++++++++++---------
 .../monitor/impl/push/amqp/AMQPMonitor.java     |  57 +++++-----
 .../apache/airavata/job/AMQPMonitorTest.java    |  64 +++++++----
 10 files changed, 213 insertions(+), 156 deletions(-)
----------------------------------------------------------------------
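
Editor's note: the refactoring described above continues the pattern visible in the earlier hunks — handlers and providers stop reading host names, working directories and stdout/stderr paths from the legacy XMLBeans descriptors and take them from JobExecutionContext instead. The following is a minimal illustrative sketch of that pattern, not code from the commit; the accessor names (getHostName, getWorkingDir, getInputDir, getOutputDir, getStandardOutput, getStandardError) are assumed from the surrounding diffs, and the class itself is hypothetical.

import org.apache.airavata.gfac.core.context.JobExecutionContext;

public class JobContextPathsSketch {
    // Summarise the job-level settings a handler now reads from the context
    // rather than from the legacy ApplicationDeploymentDescriptionType.
    static String summarize(JobExecutionContext jec) {
        return "host=" + jec.getHostName()
                + " workDir=" + jec.getWorkingDir()
                + " inputDir=" + jec.getInputDir()
                + " outputDir=" + jec.getOutputDir()
                + " stdout=" + jec.getStandardOutput()
                + " stderr=" + jec.getStandardError();
    }
}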


http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
index ed66bff..101b647 100644
--- a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
+++ b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
@@ -66,8 +66,8 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(profileResource.getGatewayID());
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol());
+                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());
@@ -100,8 +100,8 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(gatewayId);
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol());
+                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());

http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
index 14a0ab0..35549f4 100644
--- a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
+++ b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
@@ -670,8 +670,8 @@ public class AppCatalogThriftConversion {
         ComputeResourcePreference preference = new ComputeResourcePreference();
         preference.setComputeResourceId(resource.getResourceId());
         preference.setOverridebyAiravata(resource.getOverrideByAiravata());
-        preference.setPreferredJobSubmissionProtocol(resource.getPreferredJobProtocol());
-        preference.setPreferredDataMovementProtocol(resource.getPreferedDMProtocol());
+        preference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(resource.getPreferredJobProtocol()));
+        preference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(resource.getPreferedDMProtocol()));
         preference.setPreferredBatchQueue(resource.getBatchQueue());
         preference.setScratchLocation(resource.getScratchLocation());
         preference.setAllocationProjectNumber(resource.getProjectNumber());
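
Editor's note: the GwyResourceProfileImpl and AppCatalogThriftConversion hunks above reflect ComputeResourcePreference now carrying the Thrift JobSubmissionProtocol and DataMovementProtocol enums, while the catalog resource layer still persists plain strings; the conversion is an ordinary Java enum round trip. A minimal sketch under that assumption (it relies on the generated enum not overriding toString(), so valueOf(toString()) returns the same constant):

import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

public class ProtocolRoundTripSketch {
    public static void main(String[] args) {
        // Store the enum as a string, the way the resource layer persists it...
        String stored = JobSubmissionProtocol.SSH.toString();            // "SSH"
        // ...and rebuild the enum when converting back to the Thrift model.
        JobSubmissionProtocol restored = JobSubmissionProtocol.valueOf(stored);
        System.out.println(restored == JobSubmissionProtocol.SSH);       // prints true
    }
}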

http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java b/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
index 66eb6bb..3593e11 100644
--- a/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
+++ b/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
@@ -84,8 +84,8 @@ public class GatewayProfileTest {
         ComputeResourcePreference preference1 = new ComputeResourcePreference();
         preference1.setComputeResourceId(hostId1);
         preference1.setOverridebyAiravata(true);
-        preference1.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH.toString());
-        preference1.setPreferredDataMovementProtocol(DataMovementProtocol.SCP.toString());
+        preference1.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
+                preference1.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);
         preference1.setPreferredBatchQueue("queue1");
         preference1.setScratchLocation("/tmp");
         preference1.setAllocationProjectNumber("project1");
@@ -93,8 +93,8 @@ public class GatewayProfileTest {
         ComputeResourcePreference preference2 = new ComputeResourcePreference();
         preference2.setComputeResourceId(hostId2);
         preference2.setOverridebyAiravata(true);
-        preference2.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.LOCAL.toString());
-        preference2.setPreferredDataMovementProtocol(DataMovementProtocol.GridFTP.toString());
+        preference2.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.LOCAL);
+        preference2.setPreferredDataMovementProtocol(DataMovementProtocol.GridFTP);
         preference2.setPreferredBatchQueue("queue2");
         preference2.setScratchLocation("/tmp");
         preference2.setAllocationProjectNumber("project2");

http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 3616b42..cade06b 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -430,4 +430,8 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setPreferredJobSubmissionInterface(JobSubmissionInterface preferredJobSubmissionInterface) {
         this.preferredJobSubmissionInterface = preferredJobSubmissionInterface;
     }
+
+    public String getHostName() {
+        return applicationContext.getComputeResourceDescription().getHostName();
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 696b61b..e8e4c66 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -302,6 +302,20 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setGfac(this);
         jobExecutionContext.setZk(zk);
         jobExecutionContext.setCredentialStoreToken(AiravataZKUtils.getExpTokenId(zk, experimentID, taskID));
+
+        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
+        if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
+            Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
+                @Override
+                public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
+                    return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
+                }
+            });
+
+            jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
+        } else {
+            throw new GFacException("Compute resource should have at least one job submission interface defined...");
+        }
         if (gatewayResourcePreferences != null ) {
             if (gatewayResourcePreferences.getScratchLocation() == null) {
                 gatewayResourcePreferences.setScratchLocation("/tmp");
@@ -326,22 +340,11 @@ public class BetterGfacImpl implements GFac,Watcher {
             jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
 
             jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
+            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
+                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+            }
         }
-
-        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
-        if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
-            Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
-                @Override
-                public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                    return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                }
-            });
-
-            jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
-        }else {
-            throw new GFacException("Compute resource should have at least one job submission interface defined...");
-        }
-
         return jobExecutionContext;
     }
 

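The reordering above boils down to: sort the compute resource's job submission interfaces by priority, and when the gateway preference does not name a protocol, fall back to the highest-priority interface. A condensed sketch of that selection using only the getters visible in the diff (the class and method names are illustrative, not the Airavata API):

    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;

    public class JobSubmissionSelectionSketch {

        // Resolve the protocol to use for a job: the gateway preference wins when it is set,
        // otherwise use the protocol of the interface with the lowest priorityOrder value.
        static JobSubmissionProtocol resolveProtocol(List<JobSubmissionInterface> interfaces,
                                                     JobSubmissionProtocol gatewayPreference) {
            if (interfaces == null || interfaces.isEmpty()) {
                throw new IllegalArgumentException(
                        "Compute resource should have at least one job submission interface defined");
            }
            Collections.sort(interfaces, new Comparator<JobSubmissionInterface>() {
                public int compare(JobSubmissionInterface a, JobSubmissionInterface b) {
                    return a.getPriorityOrder() - b.getPriorityOrder();
                }
            });
            return gatewayPreference != null
                    ? gatewayPreference
                    : interfaces.get(0).getJobSubmissionProtocol();
        }
    }
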
http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
index 6ea1839..55da288 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
@@ -22,7 +22,6 @@ package org.apache.airavata.gfac.core.monitor;
 
 import org.apache.airavata.common.logger.AiravataLogger;
 import org.apache.airavata.common.logger.AiravataLoggerFactory;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -44,7 +43,7 @@ public class MonitorID {
 
     private Timestamp lastMonitored;
 
-    private HostDescription host;
+    private ComputeResourceDescription computeResourceDescription;
 
     private Map<String, Object> parameters;
 
@@ -67,7 +66,7 @@ public class MonitorID {
     public MonitorID() {
     }
     public MonitorID(MonitorID monitorID){
-        this.host = monitorID.getHost();
+        this.computeResourceDescription = monitorID.getComputeResourceDescription();
         this.jobStartedTime = new Timestamp((new Date()).getTime());
         this.userName = monitorID.getUserName();
         this.jobID = monitorID.getJobID();
@@ -76,8 +75,8 @@ public class MonitorID {
         this.workflowNodeID = monitorID.getWorkflowNodeID();
         this.jobName = monitorID.getJobName();
     }
-    public MonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID, String experimentID, String userName,String jobName) {
-        this.host = host;
+    public MonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID, String experimentID, String userName,String jobName) {
+        this.computeResourceDescription = computeResourceDescription;
         this.jobStartedTime = new Timestamp((new Date()).getTime());
         this.userName = userName;
         this.jobID = jobID;
@@ -89,7 +88,7 @@ public class MonitorID {
 
     public MonitorID(JobExecutionContext jobExecutionContext) {
         this.jobExecutionContext = jobExecutionContext;
-        host = jobExecutionContext.getApplicationContext().getHostDescription();
+        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
         userName = jobExecutionContext.getExperiment().getUserName();
         taskID = jobExecutionContext.getTaskData().getTaskID();
         experimentID = jobExecutionContext.getExperiment().getExperimentID();
@@ -102,12 +101,12 @@ public class MonitorID {
         }
     }
 
-    public HostDescription getHost() {
-        return host;
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
     }
 
-    public void setHost(HostDescription host) {
-        this.host = host;
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
     }
 
     public Timestamp getLastMonitored() {

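With HostDescription gone, a MonitorID is now built around the Thrift ComputeResourceDescription. A minimal construction sketch using the constructor shown above (all identifiers below are hypothetical placeholders):

    import org.apache.airavata.gfac.core.monitor.MonitorID;
    import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;

    public class MonitorIDSketch {
        public static void main(String[] args) {
            // Hypothetical compute resource; the two-argument constructor takes a resource id and a host name.
            ComputeResourceDescription resource =
                    new ComputeResourceDescription("stampede-resource-id", "stampede.tacc.utexas.edu");
            // Arguments: jobID, taskID, workflowNodeID, experimentID, userName, jobName
            MonitorID monitorID = new MonitorID(resource, "1234567", "task-1", "node-1",
                    "experiment-1", "ogce", "A12345678");
            System.out.println(monitorID.getComputeResourceDescription().getHostName());
        }
    }
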
http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
index b5a325a..92a50e4 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
@@ -20,6 +20,9 @@
 */
 package org.apache.airavata.gfac.gsissh.provider.impl;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.ExecutionMode;
 import org.apache.airavata.gfac.GFacException;
@@ -36,11 +39,16 @@ import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.airavata.schemas.gfac.HostDescriptionType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
 import org.apache.zookeeper.KeeperException;
@@ -48,6 +56,7 @@ import org.apache.zookeeper.ZooKeeper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.management.monitor.Monitor;
 import java.util.List;
 import java.util.Map;
 
@@ -76,14 +85,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
         log.info("Invoking GSISSH Provider Invoke ...");
         StringBuffer data = new StringBuffer();
         jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
+        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                .getComputeResourceDescription();
+        ApplicationDeploymentDescription appDeployDesc = jobExecutionContext.getApplicationContext()
+                .getApplicationDeploymentDescription();
         JobDetails jobDetails = new JobDetails();
         Cluster cluster = null;
-        
+
         try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                    jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+
             if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
                 cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
             }
@@ -93,7 +106,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 log.info("Successfully retrieved the Security Context");
             }
             // This installed path is a mandatory field, because this could change based on the computing resource
-            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, app, cluster);
+            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, cluster);
             jobDetails.setJobName(jobDescriptor.getJobName());
 
             log.info(jobDescriptor.toXML());
@@ -113,10 +126,10 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
 
             // Now that the job has been submitted to the resource, it's up to the Provider to pass the information to the daemon handlers
             // to perform monitoring; daemon handlers can be accessed from anywhere
-            delegateToMonitorHandlers(jobExecutionContext, (GsisshHostType) host, jobDetails.getJobID());
+            delegateToMonitorHandlers(jobExecutionContext, sshJobSubmission , jobDetails.getJobID());
             // we know this host is type GsiSSHHostType
         } catch (Exception e) {
-		    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+		    String error = "Error submitting the job to host " + computeResourceDescription.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -130,18 +143,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
           
     }
 
-    public void delegateToMonitorHandlers(JobExecutionContext jobExecutionContext, GsisshHostType host, String jobID) throws GFacHandlerException {
+    public void delegateToMonitorHandlers(JobExecutionContext jobExecutionContext, SSHJobSubmission sshJobSubmission, String jobID) throws GFacHandlerException {
         List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         if (daemonHandlers == null) {
             daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         }
         ThreadedHandler pullMonitorHandler = null;
         ThreadedHandler pushMonitorHandler = null;
-        String monitorMode = host.getMonitorMode();
+        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
         for (ThreadedHandler threadedHandler : daemonHandlers) {
             if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pullMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PULL.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
                     log.info("Job is launched successfully now parsing it to monitoring in pull mode, JobID Returned:  " + jobID);
                     pullMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -150,7 +163,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 }
             } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pushMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PUSH.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
                     log.info("Job is launched successfully now parsing it to monitoring in push mode, JobID Returned:  " + jobID);
                     pushMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -166,18 +179,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
         }
     }
 
-    public void removeFromMonitorHandlers(JobExecutionContext jobExecutionContext, GsisshHostType host, String jobID) throws GFacHandlerException {
+    public void removeFromMonitorHandlers(JobExecutionContext jobExecutionContext, SSHJobSubmission sshJobSubmission, String jobID) throws GFacHandlerException {
         List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         if (daemonHandlers == null) {
             daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         }
         ThreadedHandler pullMonitorHandler = null;
         ThreadedHandler pushMonitorHandler = null;
-        String monitorMode = host.getMonitorMode();
+        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
         for (ThreadedHandler threadedHandler : daemonHandlers) {
             if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pullMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PULL.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
                     jobExecutionContext.setProperty("cancel","true");
                     pullMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -186,7 +199,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 }
             } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pushMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PUSH.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
                     pushMonitorHandler.invoke(jobExecutionContext);
                 } else {
                     log.error("Currently we only support Pull and Push monitoring and monitorMode should be PUSH" +
@@ -208,8 +221,6 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
     public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
         //To change body of implemented methods use File | Settings | File Templates.
         log.info("canceling the job status in GSISSHProvider!!!!!");
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
         JobDetails jobDetails = jobExecutionContext.getJobDetails();
         try {
             Cluster cluster = null;
@@ -236,14 +247,14 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
             // we know this host is type GsiSSHHostType
         } catch (SSHApiException e) {
-            String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
             GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
             throw new GFacProviderException(error, e);
         } catch (Exception e) {
-            String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -255,8 +266,8 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
     public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
         // have to implement the logic to recover a gfac failure
         log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
+        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                .getComputeResourceDescription();
         String jobId = "";
         String jobDesc = "";
         try {
@@ -306,8 +317,11 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                     throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
                 }
             }
-            delegateToMonitorHandlers(jobExecutionContext, (GsisshHostType) host, jobId);
-        } catch (GFacHandlerException e) {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                    jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+            delegateToMonitorHandlers(jobExecutionContext, sshJobSubmission, jobId);
+        } catch (Exception e) {
             throw new GFacProviderException("Error delegating already ran job to Monitoring", e);
         }
     }

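The handler selection above now keys off the MonitorMode enum carried by the SSHJobSubmission instead of a free-form string. A stripped-down sketch of that dispatch, with the threaded handlers reduced to a hypothetical interface for brevity:

    import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;

    public class MonitorDispatchSketch {

        // Hypothetical stand-in for the threaded monitor handlers used by the provider.
        interface MonitorHandler {
            void invoke();
        }

        // In the provider a null mode is accepted by either handler branch; this sketch
        // simply treats null as pull monitoring to keep the dispatch in one place.
        static void delegate(MonitorMode monitorMode, MonitorHandler pullHandler, MonitorHandler pushHandler) {
            if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
                pullHandler.invoke();
            } else if (monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
                pushHandler.invoke();
            } else {
                throw new IllegalStateException("Only pull and push monitoring are supported, got: " + monitorMode);
            }
        }
    }
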
http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index 2f9dbc3..baca65c 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -20,21 +20,19 @@
 */
 package org.apache.airavata.gfac.gsissh.util;
 
-import java.sql.SQLException;
-import java.util.*;
-
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.StringUtil;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.credential.store.credential.Credential;
 import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.RequestData;
+import org.apache.airavata.gfac.core.context.ApplicationContext;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
@@ -47,22 +45,26 @@ import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
 import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.FileArrayType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
 import org.apache.airavata.schemas.gfac.StringArrayType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.apache.openjpa.lib.log.Log;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.validation.constraints.Max;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
 
 
 public class GFACGSISSHUtils {
@@ -74,32 +76,35 @@ public class GFACGSISSHUtils {
     public static int maxClusterCount = 5;
     public static Map<String, List<Cluster>> clusters = new HashMap<String, List<Cluster>>();
     public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        HostDescription registeredHost = jobExecutionContext.getApplicationContext().getHostDescription();
-        if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType
-                || registeredHost.getType() instanceof SSHHostType) {
-            logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-        } else if (registeredHost.getType() instanceof GsisshHostType) {
-            String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-            RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
-            requestData.setTokenId(credentialStoreToken);
-            PBSCluster pbsCluster = null;
-            GSISecurityContext context = null;
-            try {
+        JobSubmissionInterface jobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+        JobSubmissionProtocol jobProtocol = jobSubmissionInterface.getJobSubmissionProtocol();
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
+            if (jobProtocol == JobSubmissionProtocol.GLOBUS || jobProtocol == JobSubmissionProtocol.UNICORE
+                    || jobProtocol == JobSubmissionProtocol.CLOUD || jobProtocol == JobSubmissionProtocol.LOCAL) {
+                logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
+            } else if (jobProtocol == JobSubmissionProtocol.SSH && sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
+                String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+                RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
+                requestData.setTokenId(credentialStoreToken);
+                PBSCluster pbsCluster = null;
+                GSISecurityContext context = null;
+
                 TokenizedMyProxyAuthInfo tokenizedMyProxyAuthInfo = new TokenizedMyProxyAuthInfo(requestData);
                 CredentialReader credentialReader = GFacUtils.getCredentialReader();
-                if(credentialReader != null){
-                	CertificateCredential credential = null;
-					try {
-						credential = (CertificateCredential)credentialReader.getCredential(ServerSettings.getDefaultUserGateway(), credentialStoreToken);
-			      		requestData.setMyProxyUserName(credential.getCommunityUser().getUserName());
-					} catch (Exception e) {
-						logger.error(e.getLocalizedMessage());
-					}
+                if (credentialReader != null) {
+                    CertificateCredential credential = null;
+                    try {
+                        credential = (CertificateCredential) credentialReader.getCredential(ServerSettings.getDefaultUserGateway(), credentialStoreToken);
+                        requestData.setMyProxyUserName(credential.getCommunityUser().getUserName());
+                    } catch (Exception e) {
+                        logger.error(e.getLocalizedMessage());
+                    }
                 }
 
-                GsisshHostType gsisshHostType = (GsisshHostType) registeredHost.getType();
-                String key = requestData.getMyProxyUserName() + registeredHost.getType().getHostAddress() +
-                        gsisshHostType.getPort();
+                String key = requestData.getMyProxyUserName() + jobExecutionContext.getHostName() +
+                        sshJobSubmission.getSshPort();
                 boolean recreate = false;
                 synchronized (clusters) {
                     if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
@@ -112,7 +117,7 @@ public class GFACGSISSHUtils {
                             clusters.get(key).remove(i);
                             recreate = true;
                         }
-                        if(!recreate) {
+                        if (!recreate) {
                             try {
                                 pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
                             } catch (Exception e) {
@@ -129,13 +134,12 @@ public class GFACGSISSHUtils {
                     }
 
                     if (recreate) {
-                        ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), registeredHost.getType().getHostAddress(),
-                                gsisshHostType.getPort());
+                        ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), jobExecutionContext.getHostName(),
+                                sshJobSubmission.getSshPort());
 
                         JobManagerConfiguration jConfig = null;
-                        String installedParentPath = ((HpcApplicationDeploymentType)
-                                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
-                        String jobManager = ((GsisshHostType) registeredHost.getType()).getJobManager();
+                        String installedParentPath = sshJobSubmission.getResourceJobManager().getJobManagerBinPath();
+                        String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
                         if (jobManager == null) {
                             logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
                             jConfig = CommonUtils.getPBSJobManager(installedParentPath);
@@ -160,28 +164,30 @@ public class GFACGSISSHUtils {
                         clusters.put(key, pbsClusters);
                     }
                 }
-            } catch (Exception e) {
-                throw new GFacException("An error occurred while creating GSI security context", e);
+
+                jobExecutionContext.addSecurityContext(Constants.GSI_SECURITY_CONTEXT, context);
             }
-            jobExecutionContext.addSecurityContext(Constants.GSI_SECURITY_CONTEXT+"-"+registeredHost.getType().getHostAddress(), context);
+        } catch (Exception e) {
+            throw new GFacException("An error occurred while creating GSI security context", e);
         }
     }
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext,
-                                                    ApplicationDeploymentDescriptionType app, Cluster cluster) {
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
         JobDescriptor jobDescriptor = new JobDescriptor();
+        ApplicationContext applicationContext = jobExecutionContext.getApplicationContext();
+        ApplicationDeploymentDescription app = applicationContext.getApplicationDeploymentDescription();
         // this is common for any application descriptor
         jobDescriptor.setCallBackIp(ServerSettings.getIp());
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(app.getInputDataDirectory());
-        jobDescriptor.setOutputDirectory(app.getOutputDataDirectory());
-        jobDescriptor.setExecutablePath(app.getExecutableLocation());
-        jobDescriptor.setStandardOutFile(app.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(app.getStandardError());
+        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
+        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
+        jobDescriptor.setExecutablePath(app.getExecutablePath());
+        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
+        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
         int i = random.nextInt(Integer.MAX_VALUE); // We always set the job name
         jobDescriptor.setJobName("A" + String.valueOf(i+99999999));
-        jobDescriptor.setWorkingDirectory(app.getStaticWorkingDirectory());
+        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
 
         List<String> inputValues = new ArrayList<String>();
         MessageContext input = jobExecutionContext.getInMessageContext();

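The security-context code above keeps a small pool of PBSCluster connections keyed by MyProxy user name, host name, and SSH port, and recreates entries that fail a cheap remote probe. A simplified sketch of just the cache key and the liveness check, using an illustrative Connection interface rather than the Airavata Cluster API:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ClusterCacheSketch {

        // Hypothetical stand-in for a cached cluster connection.
        interface Connection {
            void listDirectory(String path) throws Exception;
        }

        // Same shape as the provider's cache: a bounded list of connections per key.
        static final Map<String, List<Connection>> CLUSTERS = new HashMap<String, List<Connection>>();

        // Key format mirrors the diff: MyProxy user name + host name + SSH port.
        static String cacheKey(String myProxyUserName, String hostName, int sshPort) {
            return myProxyUserName + hostName + sshPort;
        }

        // The liveness probe used above: isConnected() is not trusted, so a cheap remote
        // operation decides whether a cached connection has to be recreated.
        static boolean needsRecreate(Connection cached) {
            try {
                cached.listDirectory("~/");
                return false;
            } catch (Exception e) {
                return true;
            }
        }
    }
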
http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
index baab7b4..28d13f2 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
@@ -30,12 +30,12 @@ import java.util.concurrent.BlockingQueue;
 
 import org.apache.airavata.common.utils.MonitorPublisher;
 import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.core.PushMonitor;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
 import org.apache.airavata.gfac.monitor.util.AMQPConnectionUtil;
 import org.apache.airavata.gfac.monitor.util.CommonUtils;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -107,30 +107,37 @@ public class AMQPMonitor extends PushMonitor {
     @Override
     public boolean registerListener(MonitorID monitorID) throws AiravataMonitorException {
         // we subscribe to read user-host based subscription
-        HostDescription host = monitorID.getHost();
-        String hostAddress = host.getType().getHostAddress();
-        // in amqp case there are no multiple jobs per each host, because once a job is put in to the queue it
-        // will be picked by the Monitor, so jobs will not stay in this queueu but jobs will stay in finishQueue
-        String channelID = CommonUtils.getChannelID(monitorID);
-        if(availableChannels.get(channelID) == null){
-        try {
-            //todo need to fix this rather getting it from a file
-            Connection connection = AMQPConnectionUtil.connect(amqpHosts, connectionName, proxyPath);
-            Channel channel = null;
-            channel = connection.createChannel();
-            availableChannels.put(channelID, channel);
-            String queueName = channel.queueDeclare().getQueue();
-
-            BasicConsumer consumer = new
-                    BasicConsumer(new JSONMessageParser(), localPublisher);          // here we use local publisher
-            channel.basicConsume(queueName, true, consumer);
-            String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
-            // here we queuebind to a particular user in a particular machine
-            channel.queueBind(queueName, "glue2.computing_activity", filterString);
-            logger.info("Using filtering string to monitor: " + filterString);
-        } catch (IOException e) {
-            logger.error("Error creating the connection to finishQueue the job:" + monitorID.getUserName());
-        }
+        ComputeResourceDescription computeResourceDescription = monitorID.getComputeResourceDescription();
+        if (computeResourceDescription.isSetIpAddresses() && computeResourceDescription.getIpAddresses().size() > 0) {
+            // we get first ip address for the moment
+            String hostAddress = computeResourceDescription.getIpAddresses().get(0);
+            // in the AMQP case there are not multiple jobs per host, because once a job is put into the queue it
+            // will be picked up by the Monitor, so jobs will not stay in this queue but will stay in finishQueue
+            String channelID = CommonUtils.getChannelID(monitorID);
+            if (availableChannels.get(channelID) == null) {
+                try {
+                    //todo need to fix this rather getting it from a file
+                    Connection connection = AMQPConnectionUtil.connect(amqpHosts, connectionName, proxyPath);
+                    Channel channel = null;
+                    channel = connection.createChannel();
+                    availableChannels.put(channelID, channel);
+                    String queueName = channel.queueDeclare().getQueue();
+
+                    BasicConsumer consumer = new
+                            BasicConsumer(new JSONMessageParser(), localPublisher);          // here we use local publisher
+                    channel.basicConsume(queueName, true, consumer);
+                    String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
+                    // here we bind the queue to a particular user on a particular machine
+                    channel.queueBind(queueName, "glue2.computing_activity", filterString);
+                    logger.info("Using filtering string to monitor: " + filterString);
+                } catch (IOException e) {
+                    logger.error("Error creating the connection to finishQueue the job:" + monitorID.getUserName());
+                }
+            }
+        } else {
+            throw new AiravataMonitorException("Couldn't register monitor for jobId: " + monitorID.getJobID() +
+                    ", ComputeResourceDescription " + computeResourceDescription.getHostName() + " does not have an " +
+                    "IP address associated with it");
         }
         return true;
     }

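Registering a push listener now resolves the host address from the ComputeResourceDescription before setting up the AMQP binding. A trimmed-down sketch of that channel setup with the RabbitMQ Java client, binding a server-named queue to the glue2.computing_activity exchange; the broker host and routing key literal are placeholders (the monitor derives the key via CommonUtils.getRoutingKey and uses its own JSON-parsing consumer):

    import com.rabbitmq.client.Channel;
    import com.rabbitmq.client.Connection;
    import com.rabbitmq.client.ConnectionFactory;
    import com.rabbitmq.client.DefaultConsumer;

    public class AmqpBindSketch {

        public static void main(String[] args) throws Exception {
            // Placeholder broker host; the monitor reads its AMQP hosts and proxy path from configuration.
            ConnectionFactory factory = new ConnectionFactory();
            factory.setHost("amqp.example.org");

            Connection connection = factory.newConnection();
            Channel channel = connection.createChannel();

            // Server-named queue, just as channel.queueDeclare() is used in registerListener().
            String queueName = channel.queueDeclare().getQueue();

            // Illustrative routing key standing in for CommonUtils.getRoutingKey(userName, hostAddress).
            String routingKey = "ogce.login1.stampede.tacc.utexas.edu";
            channel.queueBind(queueName, "glue2.computing_activity", routingKey);

            // Auto-acknowledging consumer; the monitor plugs in its JSON-parsing BasicConsumer here.
            channel.basicConsume(queueName, true, new DefaultConsumer(channel));
        }
    }
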
http://git-wip-us.apache.org/repos/asf/airavata/blob/eb626fa7/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
index 94528b9..a979890 100644
--- a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
+++ b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
@@ -20,15 +20,11 @@
 */
 package org.apache.airavata.job;
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
+import com.google.common.eventbus.EventBus;
+import com.google.common.eventbus.Subscribe;
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.impl.push.amqp.AMQPMonitor;
 import org.apache.airavata.gsi.ssh.api.Cluster;
@@ -38,14 +34,29 @@ import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.eventbus.EventBus;
-import com.google.common.eventbus.Subscribe;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
 
 public class AMQPMonitorTest {
 
@@ -54,12 +65,13 @@ public class AMQPMonitorTest {
     private String certificateLocation;
     private String pbsFilePath;
     private String workingDirectory;
-    private HostDescription hostDescription;
     private MonitorPublisher monitorPublisher;
     private BlockingQueue<MonitorID> finishQueue;
     private BlockingQueue<MonitorID> pushQueue;
     private Thread pushThread;
     private String proxyFilePath;
+    private ComputeResourceDescription computeResourceDescription;
+
     @Before
     public void setUp() throws Exception {
         System.setProperty("myproxy.username", "ogce");
@@ -98,14 +110,26 @@ public class AMQPMonitorTest {
         } catch (Exception e) {
             e.printStackTrace();
         }
+        computeResourceDescription = new ComputeResourceDescription("TestComputeResourceId", "TestHostName");
+        computeResourceDescription.setHostName("stampede-host");
+        computeResourceDescription.addToIpAddresses("login1.stampede.tacc.utexas.edu");
+        ResourceJobManager resourceJobManager = new ResourceJobManager("1234", ResourceJobManagerType.SLURM);
+        Map<JobManagerCommand, String> commandMap = new HashMap<JobManagerCommand, String>();
+        commandMap.put(JobManagerCommand.SUBMISSION, "test");
+        resourceJobManager.setJobManagerCommands(commandMap);
+        resourceJobManager.setJobManagerBinPath("/usr/bin/");
+        resourceJobManager.setPushMonitoringEndpoint("push"); // TODO - add monitor mode
+        SSHJobSubmission sshJobSubmission = new SSHJobSubmission("TestSSHJobSubmissionInterfaceId", SecurityProtocol.GSI,
+                resourceJobManager);
+
+        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+        String jobSubmissionID = appCatalog.getComputeResource().addSSHJobSubmission(sshJobSubmission);
+
+        JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface(jobSubmissionID, JobSubmissionProtocol.SSH, 1);
+
+        computeResourceDescription.addToJobSubmissionInterfaces(jobSubmissionInterface);
+        computeResourceDescription.addToDataMovementInterfaces(new DataMovementInterface("4532", DataMovementProtocol.SCP, 1));
 
-        hostDescription = new HostDescription(GsisshHostType.type);
-        hostDescription.getType().setHostAddress("login1.stampede.tacc.utexas.edu");
-        hostDescription.getType().setHostName("stampede-host");
-        ((GsisshHostType) hostDescription.getType()).setJobManager("slurm");
-        ((GsisshHostType) hostDescription.getType()).setInstalledPath("/usr/bin/");
-        ((GsisshHostType) hostDescription.getType()).setPort(2222);
-        ((GsisshHostType) hostDescription.getType()).setMonitorMode("push");
     }
 
     @Test
@@ -151,7 +175,7 @@ public class AMQPMonitorTest {
         String jobID = pbsCluster.submitBatchJob(jobDescriptor);
         System.out.println(jobID);
         try {
-            pushQueue.add(new MonitorID(hostDescription, jobID,null,null,null, "ogce", jobName));
+            pushQueue.add(new MonitorID(computeResourceDescription, jobID,null,null,null, "ogce", jobName));
         } catch (Exception e) {
             e.printStackTrace();
         }


[21/50] [abbrv] airavata git commit: adding EC2 provider changes

Posted by ch...@apache.org.
adding EC2 provider changes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d856d246
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d856d246
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d856d246

Branch: refs/heads/master
Commit: d856d246e2651c5034bc6c125a14551a01bdf40c
Parents: 3e584f8
Author: chathuriw <ka...@gmail.com>
Authored: Wed Nov 5 10:14:50 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 13:10:24 2014 -0500

----------------------------------------------------------------------
 .../airavata/gfac/core/utils/GFacUtils.java     |  15 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |  46 ++-
 .../airavata/gfac/ec2/EC2ProviderTest.java      | 366 ++++++++++---------
 3 files changed, 232 insertions(+), 195 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d856d246/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index b38808b..6fb2115 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -39,10 +39,7 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
-import org.apache.airavata.model.appcatalog.computeresource.GlobusJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
@@ -1289,5 +1286,15 @@ public class GFacUtils {
         }
     }
 
+    public static CloudJobSubmission getCloudJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getCloudJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving SSH job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
 
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/d856d246/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
index 5c5af53..53e0f93 100644
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
@@ -28,6 +28,7 @@ import java.util.Calendar;
 import java.util.List;
 import java.util.Map;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
 import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.gfac.GFacException;
@@ -39,6 +40,10 @@ import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
 import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.ProviderName;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.Ec2ApplicationDeploymentType;
@@ -221,9 +226,8 @@ public class EC2Provider extends AbstractProvider {
                 /* Assuming that there is just a single result. If you want to add more results, update the necessary
                    logic below */
                 String paramName = outparamType.getName();
-                outParam.getType().changeType(StringParameterType.type);
-                ((StringParameterType) outParam.getType()).setValue(executionResult);
-                jobExecutionContext.getOutMessageContext().addParameter(paramName, outParam);
+                String value = outparamType.getValue();
+                jobExecutionContext.getOutMessageContext().addParameter(paramName, value);
             }
             GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.COMPLETE);
         } catch (InvalidSshKeyException e) {
@@ -252,26 +256,28 @@ public class EC2Provider extends AbstractProvider {
      * @throws GFacProviderException GFacProviderException
      */
     private String createShellCmd(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        String command = "";
-        ApplicationDescription appDesc = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription();
-
-        if(appDesc.getType() instanceof Ec2ApplicationDeploymentType) {
-            Ec2ApplicationDeploymentType type = (Ec2ApplicationDeploymentType) appDesc.getType();
-            if(type.getExecutable() != null) {
-                command = type.getExecutableType() + " " + type.getExecutable();
+        try {
+            String command = "";
+            JobSubmissionInterface submissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            CloudJobSubmission cloudJobSubmission = GFacUtils.getCloudJobSubmission(submissionInterface.getJobSubmissionInterfaceId());
+            String executablePath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
+            if (cloudJobSubmission.getProviderName().equals(ProviderName.EC2)) {
+                if (cloudJobSubmission.getExecutableType() != null) {
+                    command = cloudJobSubmission.getExecutableType() + " " + executablePath;
+                } else {
+                    command = "sh" + " " + executablePath;
+                }
+                command = setCmdParams(jobExecutionContext, command);
+
             } else {
-                command = "sh" + " " + type.getExecutable();
+                command = "sh" + " " + executablePath;
+                command = setCmdParams(jobExecutionContext, command);
             }
-            command = setCmdParams(jobExecutionContext, command);
-
-        } else {
-            ApplicationDeploymentDescriptionType type = appDesc.getType();
-            command = "sh" + " " + type.getExecutableLocation();
-            command = setCmdParams(jobExecutionContext, command);
+            return command + '\n';
+        } catch (AppCatalogException e) {
+            log.error("Error while retrieving cloud job submission", e);
+            throw new GFacProviderException("Error while retrieving cloud job submission", e);
         }
-
-        return command + '\n';
     }
 
     private String setCmdParams(JobExecutionContext jobExecutionContext, String command) throws GFacProviderException {

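The rewritten createShellCmd derives the shell command from the CloudJobSubmission's executable type and the deployment's executable path, defaulting to sh when no type is configured. A reduced sketch of just that string assembly (argument handling via setCmdParams is left out; the paths in main are hypothetical):

    public class Ec2CommandSketch {

        // Mirrors the fallback above: use the configured executable type when present, otherwise "sh".
        static String buildCommand(String executableType, String executablePath) {
            String launcher = (executableType != null && !executableType.isEmpty()) ? executableType : "sh";
            return launcher + " " + executablePath + "\n";
        }

        public static void main(String[] args) {
            System.out.print(buildCommand("python", "/home/ec2-user/run_job.py"));
            System.out.print(buildCommand(null, "/home/ec2-user/run_job.sh"));
        }
    }
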
http://git-wip-us.apache.org/repos/asf/airavata/blob/d856d246/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java b/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
index d558ab9..9f86197 100644
--- a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
+++ b/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
@@ -1,171 +1,195 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.commons.gfac.type.*;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.schemas.gfac.*;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Your Amazon instance should be in a running state before running this test.
- */
-public class EC2ProviderTest {
-    private JobExecutionContext jobExecutionContext;
-
-    private static final String hostName = "ec2-host";
-
-    private static final String hostAddress = "ec2-address";
-
-    private static final String sequence1 = "RR042383.21413#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-            "CTCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCCTGCGCCCATTGACCAATATTCCTCA" +
-            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAATCTTCCTTTCAGAAG" +
-            "GCTGTCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCCGCCGGTCGCCATCAGTCTTAGCAAGCTAAGACCATGCTGCCCCTGACTTGCATGT" +
-            "GTTAAGCCTGTAGCTTAGCGTTC";
-
-    private static final String sequence2 = "RR042383.31934#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-            "CCCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCTCTCGCCCATTGACCAATATTCCTCA" +
-            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAAATCTTCCTTTCAGAA" +
-            "GGCTATCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCG";
-
-    /* Following variables are needed to be set in-order to run the test. Since these are account specific information,
-       I'm not adding the values here. It's the responsibility of the person who's running the test to update
-       these variables accordingly.
-       */
-
-    /* Username used to log into your ec2 instance eg.ec2-user */
-    private String userName = "";
-
-    /* Secret key used to connect to the image */
-    private String secretKey = "";
-
-    /* Access key used to connect to the image */
-    private String accessKey = "";
-
-    /* Instance id of the running instance of your image */
-    private String instanceId = "";
-
-    @Before
-    public void setUp() throws Exception {
-        URL resource = EC2ProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-        /* EC2 Host */
-        HostDescription host = new HostDescription(Ec2HostType.type);
-        host.getType().setHostName(hostName);
-        host.getType().setHostAddress(hostAddress);
-
-        /* App */
-        ApplicationDescription ec2Desc = new ApplicationDescription(Ec2ApplicationDeploymentType.type);
-        Ec2ApplicationDeploymentType ec2App = (Ec2ApplicationDeploymentType)ec2Desc.getType();
-
-        String serviceName = "Gnome_distance_calculation_workflow";
-        ec2Desc.getType().addNewApplicationName().setStringValue(serviceName);
-        ec2App.setJobType(JobTypeType.EC_2);
-        ec2App.setExecutable("/home/ec2-user/run.sh");
-        ec2App.setExecutableType("sh");
-
-        /* Service */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("GenomeEC2");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input1 = InputParameterType.Factory.newInstance();
-        input1.setParameterName("genome_input1");
-        input1.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input1);
-
-        InputParameterType input2 = InputParameterType.Factory.newInstance();
-        input2.setParameterName("genome_input2");
-        input2.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input2);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("genome_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(ec2Desc);
-        applicationContext.setHostDescription(host);
-
-        AmazonSecurityContext amazonSecurityContext =
-                new AmazonSecurityContext(userName, accessKey, secretKey, instanceId);
-        jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT, amazonSecurityContext);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter genomeInput1 = new ActualParameter();
-        ((StringParameterType)genomeInput1.getType()).setValue(sequence1);
-        inMessage.addParameter("genome_input1", genomeInput1);
-
-        ActualParameter genomeInput2 = new ActualParameter();
-        ((StringParameterType)genomeInput2.getType()).setValue(sequence2);
-        inMessage.addParameter("genome_input2", genomeInput2);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-        outMessage.addParameter("distance", echo_out);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-        jobExecutionContext.setOutMessageContext(outMessage);
-    }
-
-    @Test
-    public void testGramProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.
-                toString((ActualParameter) outMessageContext.getParameter("genome_output")), "476");
-    }
-}
-
-
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.gfac.ec2;
+//
+//import org.airavata.appcatalog.cpi.AppCatalog;
+//import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
+//import org.apache.airavata.commons.gfac.type.*;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.model.appcatalog.computeresource.*;
+//import org.apache.airavata.schemas.gfac.*;
+//import org.junit.Assert;
+//import org.junit.Before;
+//import org.junit.Test;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+///**
+// * Your Amazon instance should be in a running state before running this test.
+// */
+//public class EC2ProviderTest {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    private static final String hostName = "ec2-host";
+//
+//    private static final String hostAddress = "ec2-address";
+//
+//    private static final String sequence1 = "RR042383.21413#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
+//            "CTCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCCTGCGCCCATTGACCAATATTCCTCA" +
+//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
+//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAATCTTCCTTTCAGAAG" +
+//            "GCTGTCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCCGCCGGTCGCCATCAGTCTTAGCAAGCTAAGACCATGCTGCCCCTGACTTGCATGT" +
+//            "GTTAAGCCTGTAGCTTAGCGTTC";
+//
+//    private static final String sequence2 = "RR042383.31934#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
+//            "CCCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCTCTCGCCCATTGACCAATATTCCTCA" +
+//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
+//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAAATCTTCCTTTCAGAA" +
+//            "GGCTATCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCG";
+//
+//    /* Following variables are needed to be set in-order to run the test. Since these are account specific information,
+//       I'm not adding the values here. It's the responsibility of the person who's running the test to update
+//       these variables accordingly.
+//       */
+//
+//    /* Username used to log into your ec2 instance eg.ec2-user */
+//    private String userName = "";
+//
+//    /* Secret key used to connect to the image */
+//    private String secretKey = "";
+//
+//    /* Access key used to connect to the image */
+//    private String accessKey = "";
+//
+//    /* Instance id of the running instance of your image */
+//    private String instanceId = "";
+//
+//    @Before
+//    public void setUp() throws Exception {
+//        URL resource = EC2ProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+//        /* EC2 Host */
+//        ComputeResourceDescription host = new ComputeResourceDescription();
+//        host.setHostName(hostName);
+//        host.setResourceDescription("EC2 compute resource");
+//        host.addToIpAddresses(hostAddress);
+//
+//        CloudJobSubmission cloudJobSubmission = new CloudJobSubmission();
+//        cloudJobSubmission.setProviderName(ProviderName.EC2);
+//        cloudJobSubmission.setExecutableType("sh");
+//        cloudJobSubmission.setNodeId(instanceId);
+//        cloudJobSubmission.setSecurityProtocol(SecurityProtocol.USERNAME_PASSWORD);
+//        cloudJobSubmission.setUserAccountName(userName);
+//
+//        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+//        String submissionId = appCatalog.getComputeResource().addCloudJobSubmission(cloudJobSubmission);
+//
+//        JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
+//        submissionInterface.setJobSubmissionInterfaceId(submissionId);
+//        submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.CLOUD);
+//        submissionInterface.setPriorityOrder(0);
+//
+//        host.addToJobSubmissionInterfaces(submissionInterface);
+//
+//        String computeResourceId = appCatalog.getComputeResource().addComputeResource(host);
+//
+//        /* App */
+//
+//        ApplicationDescription ec2Desc = new ApplicationDescription(Ec2ApplicationDeploymentType.type);
+//        Ec2ApplicationDeploymentType ec2App = (Ec2ApplicationDeploymentType)ec2Desc.getType();
+//
+//        String serviceName = "Gnome_distance_calculation_workflow";
+//        ec2Desc.getType().addNewApplicationName().setStringValue(serviceName);
+//        ec2App.setJobType(JobTypeType.EC_2);
+//        ec2App.setExecutable("/home/ec2-user/run.sh");
+//        ec2App.setExecutableType("sh");
+//
+//        /* Service */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("GenomeEC2");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input1 = InputParameterType.Factory.newInstance();
+//        input1.setParameterName("genome_input1");
+//        input1.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input1);
+//
+//        InputParameterType input2 = InputParameterType.Factory.newInstance();
+//        input2.setParameterName("genome_input2");
+//        input2.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input2);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("genome_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(ec2Desc);
+//        applicationContext.setHostDescription(host);
+//
+//        AmazonSecurityContext amazonSecurityContext =
+//                new AmazonSecurityContext(userName, accessKey, secretKey, instanceId);
+//        jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT, amazonSecurityContext);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter genomeInput1 = new ActualParameter();
+//        ((StringParameterType)genomeInput1.getType()).setValue(sequence1);
+//        inMessage.addParameter("genome_input1", genomeInput1);
+//
+//        ActualParameter genomeInput2 = new ActualParameter();
+//        ((StringParameterType)genomeInput2.getType()).setValue(sequence2);
+//        inMessage.addParameter("genome_input2", genomeInput2);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+//        outMessage.addParameter("distance", echo_out);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//    }
+//
+//    @Test
+//    public void testGramProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+//        Assert.assertEquals(MappingFactory.
+//                toString((ActualParameter) outMessageContext.getParameter("genome_output")), "476");
+//    }
+//}
+//
+//


[14/50] [abbrv] airavata git commit: Removed MonitorMode from ResourceJobManager and added it to the SSHJobSubmission struct, changed the MonitorMode enum values

Posted by ch...@apache.org.
Removed MonitorMode from ResourceJobManager and added it to the SSHJobSubmission struct, changed the MonitorMode enum values


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/e28919c9
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/e28919c9
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/e28919c9

Branch: refs/heads/master
Commit: e28919c984b0b6a0f7f1203b66afb865a79b4e2b
Parents: 73e21be
Author: shamrath <sh...@gmail.com>
Authored: Thu Oct 30 16:34:35 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:15 2014 -0500

----------------------------------------------------------------------
 .../lib/airavata/computeResourceModel_types.cpp | 116 +++++++++---------
 .../lib/airavata/computeResourceModel_types.h   |  60 ++++-----
 .../Model/AppCatalog/ComputeResource/Types.php  |  58 ++++-----
 .../appcatalog/computeresource/MonitorMode.java |  16 +--
 .../computeresource/ResourceJobManager.java     | 121 +------------------
 .../computeresource/SSHJobSubmission.java       | 121 ++++++++++++++++++-
 .../computeResourceModel.thrift                 |  35 +++---
 7 files changed, 264 insertions(+), 263 deletions(-)
----------------------------------------------------------------------
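
In client terms, the effect of this commit is that the monitoring mode now travels with the SSH job submission interface instead of with the resource job manager. A minimal illustrative snippet follows (not part of the commit), assuming the standard Thrift-generated no-arg constructors and setters visible in the generated sources below; the POLL_JOB_MANAGER value is only an example choice.

import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;

public class MonitorModeSketch {
    public static void main(String[] args) {
        ResourceJobManager resourceJobManager = new ResourceJobManager();
        // resourceJobManager.setMonitorMode(...) no longer exists after this commit.

        SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
        sshJobSubmission.setResourceJobManager(resourceJobManager);
        // monitorMode is now an optional field (id 6) of SSHJobSubmission.
        sshJobSubmission.setMonitorMode(MonitorMode.POLL_JOB_MANAGER);

        System.out.println(sshJobSubmission.isSetMonitorMode()); // true
    }
}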


http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
index 27f62dd..25c7bf1 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
@@ -61,16 +61,6 @@ const char* _kJobManagerCommandNames[] = {
 };
 const std::map<int, const char*> _JobManagerCommand_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(7, _kJobManagerCommandValues, _kJobManagerCommandNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
-int _kMonitorModeValues[] = {
-  MonitorMode::PUSH,
-  MonitorMode::PULL
-};
-const char* _kMonitorModeNames[] = {
-  "PUSH",
-  "PULL"
-};
-const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kMonitorModeValues, _kMonitorModeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
-
 int _kFileSystemsValues[] = {
   FileSystems::HOME,
   FileSystems::WORK,
@@ -119,6 +109,16 @@ const char* _kJobSubmissionProtocolNames[] = {
 };
 const std::map<int, const char*> _JobSubmissionProtocol_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(5, _kJobSubmissionProtocolValues, _kJobSubmissionProtocolNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
+int _kMonitorModeValues[] = {
+  MonitorMode::POLL_JOB_MANAGER,
+  MonitorMode::XSEDE_AMQP_SUBSCRIBE
+};
+const char* _kMonitorModeNames[] = {
+  "POLL_JOB_MANAGER",
+  "XSEDE_AMQP_SUBSCRIBE"
+};
+const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kMonitorModeValues, _kMonitorModeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
 int _kDataMovementProtocolValues[] = {
   DataMovementProtocol::LOCAL,
   DataMovementProtocol::SCP,
@@ -147,8 +147,8 @@ const char* _kProviderNameNames[] = {
 };
 const std::map<int, const char*> _ProviderName_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kProviderNameValues, _kProviderNameNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
-const char* ResourceJobManager::ascii_fingerprint = "83F3E1FB1C076C79A1E733A1E531B938";
-const uint8_t ResourceJobManager::binary_fingerprint[16] = {0x83,0xF3,0xE1,0xFB,0x1C,0x07,0x6C,0x79,0xA1,0xE7,0x33,0xA1,0xE5,0x31,0xB9,0x38};
+const char* ResourceJobManager::ascii_fingerprint = "F61CAF80247D0E44C8D52504F3A43BED";
+const uint8_t ResourceJobManager::binary_fingerprint[16] = {0xF6,0x1C,0xAF,0x80,0x24,0x7D,0x0E,0x44,0xC8,0xD5,0x25,0x04,0xF3,0xA4,0x3B,0xED};
 
 uint32_t ResourceJobManager::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -231,16 +231,6 @@ uint32_t ResourceJobManager::read(::apache::thrift::protocol::TProtocol* iprot)
           xfer += iprot->skip(ftype);
         }
         break;
-      case 6:
-        if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast9;
-          xfer += iprot->readI32(ecast9);
-          this->monitorMode = (MonitorMode::type)ecast9;
-          this->__isset.monitorMode = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -283,21 +273,16 @@ uint32_t ResourceJobManager::write(::apache::thrift::protocol::TProtocol* oprot)
     xfer += oprot->writeFieldBegin("jobManagerCommands", ::apache::thrift::protocol::T_MAP, 5);
     {
       xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_I32, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->jobManagerCommands.size()));
-      std::map<JobManagerCommand::type, std::string> ::const_iterator _iter10;
-      for (_iter10 = this->jobManagerCommands.begin(); _iter10 != this->jobManagerCommands.end(); ++_iter10)
+      std::map<JobManagerCommand::type, std::string> ::const_iterator _iter9;
+      for (_iter9 = this->jobManagerCommands.begin(); _iter9 != this->jobManagerCommands.end(); ++_iter9)
       {
-        xfer += oprot->writeI32((int32_t)_iter10->first);
-        xfer += oprot->writeString(_iter10->second);
+        xfer += oprot->writeI32((int32_t)_iter9->first);
+        xfer += oprot->writeString(_iter9->second);
       }
       xfer += oprot->writeMapEnd();
     }
     xfer += oprot->writeFieldEnd();
   }
-  if (this->__isset.monitorMode) {
-    xfer += oprot->writeFieldBegin("monitorMode", ::apache::thrift::protocol::T_I32, 6);
-    xfer += oprot->writeI32((int32_t)this->monitorMode);
-    xfer += oprot->writeFieldEnd();
-  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -310,7 +295,6 @@ void swap(ResourceJobManager &a, ResourceJobManager &b) {
   swap(a.pushMonitoringEndpoint, b.pushMonitoringEndpoint);
   swap(a.jobManagerBinPath, b.jobManagerBinPath);
   swap(a.jobManagerCommands, b.jobManagerCommands);
-  swap(a.monitorMode, b.monitorMode);
   swap(a.__isset, b.__isset);
 }
 
@@ -484,9 +468,9 @@ uint32_t SCPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast11;
-          xfer += iprot->readI32(ecast11);
-          this->securityProtocol = (SecurityProtocol::type)ecast11;
+          int32_t ecast10;
+          xfer += iprot->readI32(ecast10);
+          this->securityProtocol = (SecurityProtocol::type)ecast10;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -596,9 +580,9 @@ uint32_t GridFTPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast12;
-          xfer += iprot->readI32(ecast12);
-          this->securityProtocol = (SecurityProtocol::type)ecast12;
+          int32_t ecast11;
+          xfer += iprot->readI32(ecast11);
+          this->securityProtocol = (SecurityProtocol::type)ecast11;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -608,14 +592,14 @@ uint32_t GridFTPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->gridFTPEndPoints.clear();
-            uint32_t _size13;
-            ::apache::thrift::protocol::TType _etype16;
-            xfer += iprot->readListBegin(_etype16, _size13);
-            this->gridFTPEndPoints.resize(_size13);
-            uint32_t _i17;
-            for (_i17 = 0; _i17 < _size13; ++_i17)
+            uint32_t _size12;
+            ::apache::thrift::protocol::TType _etype15;
+            xfer += iprot->readListBegin(_etype15, _size12);
+            this->gridFTPEndPoints.resize(_size12);
+            uint32_t _i16;
+            for (_i16 = 0; _i16 < _size12; ++_i16)
             {
-              xfer += iprot->readString(this->gridFTPEndPoints[_i17]);
+              xfer += iprot->readString(this->gridFTPEndPoints[_i16]);
             }
             xfer += iprot->readListEnd();
           }
@@ -657,10 +641,10 @@ uint32_t GridFTPDataMovement::write(::apache::thrift::protocol::TProtocol* oprot
   xfer += oprot->writeFieldBegin("gridFTPEndPoints", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->gridFTPEndPoints.size()));
-    std::vector<std::string> ::const_iterator _iter18;
-    for (_iter18 = this->gridFTPEndPoints.begin(); _iter18 != this->gridFTPEndPoints.end(); ++_iter18)
+    std::vector<std::string> ::const_iterator _iter17;
+    for (_iter17 = this->gridFTPEndPoints.begin(); _iter17 != this->gridFTPEndPoints.end(); ++_iter17)
     {
-      xfer += oprot->writeString((*_iter18));
+      xfer += oprot->writeString((*_iter17));
     }
     xfer += oprot->writeListEnd();
   }
@@ -714,9 +698,9 @@ uint32_t UnicoreDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast19;
-          xfer += iprot->readI32(ecast19);
-          this->securityProtocol = (SecurityProtocol::type)ecast19;
+          int32_t ecast18;
+          xfer += iprot->readI32(ecast18);
+          this->securityProtocol = (SecurityProtocol::type)ecast18;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -776,8 +760,8 @@ void swap(UnicoreDataMovement &a, UnicoreDataMovement &b) {
   swap(a.unicoreEndPointURL, b.unicoreEndPointURL);
 }
 
-const char* LOCALSubmission::ascii_fingerprint = "D51508D1A661370F4785A01334DB8637";
-const uint8_t LOCALSubmission::binary_fingerprint[16] = {0xD5,0x15,0x08,0xD1,0xA6,0x61,0x37,0x0F,0x47,0x85,0xA0,0x13,0x34,0xDB,0x86,0x37};
+const char* LOCALSubmission::ascii_fingerprint = "A5A35C842CBE1CA9D6A13C5974C6FB8F";
+const uint8_t LOCALSubmission::binary_fingerprint[16] = {0xA5,0xA3,0x5C,0x84,0x2C,0xBE,0x1C,0xA9,0xD6,0xA1,0x3C,0x59,0x74,0xC6,0xFB,0x8F};
 
 uint32_t LOCALSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -920,8 +904,8 @@ void swap(LOCALDataMovement &a, LOCALDataMovement &b) {
   swap(a.dataMovementInterfaceId, b.dataMovementInterfaceId);
 }
 
-const char* SSHJobSubmission::ascii_fingerprint = "BCAF073DD81C8F6A9ED716A45569D2B3";
-const uint8_t SSHJobSubmission::binary_fingerprint[16] = {0xBC,0xAF,0x07,0x3D,0xD8,0x1C,0x8F,0x6A,0x9E,0xD7,0x16,0xA4,0x55,0x69,0xD2,0xB3};
+const char* SSHJobSubmission::ascii_fingerprint = "A62183DAA7AFF027173705420A9D99D0";
+const uint8_t SSHJobSubmission::binary_fingerprint[16] = {0xA6,0x21,0x83,0xDA,0xA7,0xAF,0xF0,0x27,0x17,0x37,0x05,0x42,0x0A,0x9D,0x99,0xD0};
 
 uint32_t SSHJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -956,9 +940,9 @@ uint32_t SSHJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast20;
-          xfer += iprot->readI32(ecast20);
-          this->securityProtocol = (SecurityProtocol::type)ecast20;
+          int32_t ecast19;
+          xfer += iprot->readI32(ecast19);
+          this->securityProtocol = (SecurityProtocol::type)ecast19;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -988,6 +972,16 @@ uint32_t SSHJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
           xfer += iprot->skip(ftype);
         }
         break;
+      case 6:
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast20;
+          xfer += iprot->readI32(ecast20);
+          this->monitorMode = (MonitorMode::type)ecast20;
+          this->__isset.monitorMode = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -1032,6 +1026,11 @@ uint32_t SSHJobSubmission::write(::apache::thrift::protocol::TProtocol* oprot) c
     xfer += oprot->writeI32(this->sshPort);
     xfer += oprot->writeFieldEnd();
   }
+  if (this->__isset.monitorMode) {
+    xfer += oprot->writeFieldBegin("monitorMode", ::apache::thrift::protocol::T_I32, 6);
+    xfer += oprot->writeI32((int32_t)this->monitorMode);
+    xfer += oprot->writeFieldEnd();
+  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -1044,6 +1043,7 @@ void swap(SSHJobSubmission &a, SSHJobSubmission &b) {
   swap(a.resourceJobManager, b.resourceJobManager);
   swap(a.alternativeSSHHostName, b.alternativeSSHHostName);
   swap(a.sshPort, b.sshPort);
+  swap(a.monitorMode, b.monitorMode);
   swap(a.__isset, b.__isset);
 }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
index e94520d..582b2d1 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
@@ -59,15 +59,6 @@ struct JobManagerCommand {
 
 extern const std::map<int, const char*> _JobManagerCommand_VALUES_TO_NAMES;
 
-struct MonitorMode {
-  enum type {
-    PUSH = 0,
-    PULL = 1
-  };
-};
-
-extern const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES;
-
 struct FileSystems {
   enum type {
     HOME = 0,
@@ -104,6 +95,15 @@ struct JobSubmissionProtocol {
 
 extern const std::map<int, const char*> _JobSubmissionProtocol_VALUES_TO_NAMES;
 
+struct MonitorMode {
+  enum type {
+    POLL_JOB_MANAGER = 0,
+    XSEDE_AMQP_SUBSCRIBE = 1
+  };
+};
+
+extern const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES;
+
 struct DataMovementProtocol {
   enum type {
     LOCAL = 0,
@@ -127,20 +127,19 @@ struct ProviderName {
 extern const std::map<int, const char*> _ProviderName_VALUES_TO_NAMES;
 
 typedef struct _ResourceJobManager__isset {
-  _ResourceJobManager__isset() : pushMonitoringEndpoint(false), jobManagerBinPath(false), jobManagerCommands(false), monitorMode(false) {}
+  _ResourceJobManager__isset() : pushMonitoringEndpoint(false), jobManagerBinPath(false), jobManagerCommands(false) {}
   bool pushMonitoringEndpoint;
   bool jobManagerBinPath;
   bool jobManagerCommands;
-  bool monitorMode;
 } _ResourceJobManager__isset;
 
 class ResourceJobManager {
  public:
 
-  static const char* ascii_fingerprint; // = "83F3E1FB1C076C79A1E733A1E531B938";
-  static const uint8_t binary_fingerprint[16]; // = {0x83,0xF3,0xE1,0xFB,0x1C,0x07,0x6C,0x79,0xA1,0xE7,0x33,0xA1,0xE5,0x31,0xB9,0x38};
+  static const char* ascii_fingerprint; // = "F61CAF80247D0E44C8D52504F3A43BED";
+  static const uint8_t binary_fingerprint[16]; // = {0xF6,0x1C,0xAF,0x80,0x24,0x7D,0x0E,0x44,0xC8,0xD5,0x25,0x04,0xF3,0xA4,0x3B,0xED};
 
-  ResourceJobManager() : resourceJobManagerId("DO_NOT_SET_AT_CLIENTS"), resourceJobManagerType((ResourceJobManagerType::type)0), pushMonitoringEndpoint(), jobManagerBinPath(), monitorMode((MonitorMode::type)0) {
+  ResourceJobManager() : resourceJobManagerId("DO_NOT_SET_AT_CLIENTS"), resourceJobManagerType((ResourceJobManagerType::type)0), pushMonitoringEndpoint(), jobManagerBinPath() {
   }
 
   virtual ~ResourceJobManager() throw() {}
@@ -150,7 +149,6 @@ class ResourceJobManager {
   std::string pushMonitoringEndpoint;
   std::string jobManagerBinPath;
   std::map<JobManagerCommand::type, std::string>  jobManagerCommands;
-  MonitorMode::type monitorMode;
 
   _ResourceJobManager__isset __isset;
 
@@ -177,11 +175,6 @@ class ResourceJobManager {
     __isset.jobManagerCommands = true;
   }
 
-  void __set_monitorMode(const MonitorMode::type val) {
-    monitorMode = val;
-    __isset.monitorMode = true;
-  }
-
   bool operator == (const ResourceJobManager & rhs) const
   {
     if (!(resourceJobManagerId == rhs.resourceJobManagerId))
@@ -200,10 +193,6 @@ class ResourceJobManager {
       return false;
     else if (__isset.jobManagerCommands && !(jobManagerCommands == rhs.jobManagerCommands))
       return false;
-    if (__isset.monitorMode != rhs.__isset.monitorMode)
-      return false;
-    else if (__isset.monitorMode && !(monitorMode == rhs.monitorMode))
-      return false;
     return true;
   }
   bool operator != (const ResourceJobManager &rhs) const {
@@ -493,8 +482,8 @@ void swap(UnicoreDataMovement &a, UnicoreDataMovement &b);
 class LOCALSubmission {
  public:
 
-  static const char* ascii_fingerprint; // = "D51508D1A661370F4785A01334DB8637";
-  static const uint8_t binary_fingerprint[16]; // = {0xD5,0x15,0x08,0xD1,0xA6,0x61,0x37,0x0F,0x47,0x85,0xA0,0x13,0x34,0xDB,0x86,0x37};
+  static const char* ascii_fingerprint; // = "A5A35C842CBE1CA9D6A13C5974C6FB8F";
+  static const uint8_t binary_fingerprint[16]; // = {0xA5,0xA3,0x5C,0x84,0x2C,0xBE,0x1C,0xA9,0xD6,0xA1,0x3C,0x59,0x74,0xC6,0xFB,0x8F};
 
   LOCALSubmission() : jobSubmissionInterfaceId("DO_NOT_SET_AT_CLIENTS") {
   }
@@ -571,18 +560,19 @@ class LOCALDataMovement {
 void swap(LOCALDataMovement &a, LOCALDataMovement &b);
 
 typedef struct _SSHJobSubmission__isset {
-  _SSHJobSubmission__isset() : alternativeSSHHostName(false), sshPort(true) {}
+  _SSHJobSubmission__isset() : alternativeSSHHostName(false), sshPort(true), monitorMode(false) {}
   bool alternativeSSHHostName;
   bool sshPort;
+  bool monitorMode;
 } _SSHJobSubmission__isset;
 
 class SSHJobSubmission {
  public:
 
-  static const char* ascii_fingerprint; // = "BCAF073DD81C8F6A9ED716A45569D2B3";
-  static const uint8_t binary_fingerprint[16]; // = {0xBC,0xAF,0x07,0x3D,0xD8,0x1C,0x8F,0x6A,0x9E,0xD7,0x16,0xA4,0x55,0x69,0xD2,0xB3};
+  static const char* ascii_fingerprint; // = "A62183DAA7AFF027173705420A9D99D0";
+  static const uint8_t binary_fingerprint[16]; // = {0xA6,0x21,0x83,0xDA,0xA7,0xAF,0xF0,0x27,0x17,0x37,0x05,0x42,0x0A,0x9D,0x99,0xD0};
 
-  SSHJobSubmission() : jobSubmissionInterfaceId("DO_NOT_SET_AT_CLIENTS"), securityProtocol((SecurityProtocol::type)0), alternativeSSHHostName(), sshPort(22) {
+  SSHJobSubmission() : jobSubmissionInterfaceId("DO_NOT_SET_AT_CLIENTS"), securityProtocol((SecurityProtocol::type)0), alternativeSSHHostName(), sshPort(22), monitorMode((MonitorMode::type)0) {
   }
 
   virtual ~SSHJobSubmission() throw() {}
@@ -592,6 +582,7 @@ class SSHJobSubmission {
   ResourceJobManager resourceJobManager;
   std::string alternativeSSHHostName;
   int32_t sshPort;
+  MonitorMode::type monitorMode;
 
   _SSHJobSubmission__isset __isset;
 
@@ -617,6 +608,11 @@ class SSHJobSubmission {
     __isset.sshPort = true;
   }
 
+  void __set_monitorMode(const MonitorMode::type val) {
+    monitorMode = val;
+    __isset.monitorMode = true;
+  }
+
   bool operator == (const SSHJobSubmission & rhs) const
   {
     if (!(jobSubmissionInterfaceId == rhs.jobSubmissionInterfaceId))
@@ -633,6 +629,10 @@ class SSHJobSubmission {
       return false;
     else if (__isset.sshPort && !(sshPort == rhs.sshPort))
       return false;
+    if (__isset.monitorMode != rhs.__isset.monitorMode)
+      return false;
+    else if (__isset.monitorMode && !(monitorMode == rhs.monitorMode))
+      return false;
     return true;
   }
   bool operator != (const SSHJobSubmission &rhs) const {

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
index 3d7b921..9623821 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
@@ -49,15 +49,6 @@ final class JobManagerCommand {
   );
 }
 
-final class MonitorMode {
-  const PUSH = 0;
-  const PULL = 1;
-  static public $__names = array(
-    0 => 'PUSH',
-    1 => 'PULL',
-  );
-}
-
 final class FileSystems {
   const HOME = 0;
   const WORK = 1;
@@ -103,6 +94,15 @@ final class JobSubmissionProtocol {
   );
 }
 
+final class MonitorMode {
+  const POLL_JOB_MANAGER = 0;
+  const XSEDE_AMQP_SUBSCRIBE = 1;
+  static public $__names = array(
+    0 => 'POLL_JOB_MANAGER',
+    1 => 'XSEDE_AMQP_SUBSCRIBE',
+  );
+}
+
 final class DataMovementProtocol {
   const LOCAL = 0;
   const SCP = 1;
@@ -137,7 +137,6 @@ class ResourceJobManager {
   public $pushMonitoringEndpoint = null;
   public $jobManagerBinPath = null;
   public $jobManagerCommands = null;
-  public $monitorMode = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -170,10 +169,6 @@ class ResourceJobManager {
             'type' => TType::STRING,
             ),
           ),
-        6 => array(
-          'var' => 'monitorMode',
-          'type' => TType::I32,
-          ),
         );
     }
     if (is_array($vals)) {
@@ -192,9 +187,6 @@ class ResourceJobManager {
       if (isset($vals['jobManagerCommands'])) {
         $this->jobManagerCommands = $vals['jobManagerCommands'];
       }
-      if (isset($vals['monitorMode'])) {
-        $this->monitorMode = $vals['monitorMode'];
-      }
     }
   }
 
@@ -265,13 +257,6 @@ class ResourceJobManager {
             $xfer += $input->skip($ftype);
           }
           break;
-        case 6:
-          if ($ftype == TType::I32) {
-            $xfer += $input->readI32($this->monitorMode);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -323,11 +308,6 @@ class ResourceJobManager {
       }
       $xfer += $output->writeFieldEnd();
     }
-    if ($this->monitorMode !== null) {
-      $xfer += $output->writeFieldBegin('monitorMode', TType::I32, 6);
-      $xfer += $output->writeI32($this->monitorMode);
-      $xfer += $output->writeFieldEnd();
-    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;
@@ -1066,6 +1046,7 @@ class SSHJobSubmission {
   public $resourceJobManager = null;
   public $alternativeSSHHostName = null;
   public $sshPort = 22;
+  public $monitorMode = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -1091,6 +1072,10 @@ class SSHJobSubmission {
           'var' => 'sshPort',
           'type' => TType::I32,
           ),
+        6 => array(
+          'var' => 'monitorMode',
+          'type' => TType::I32,
+          ),
         );
     }
     if (is_array($vals)) {
@@ -1109,6 +1094,9 @@ class SSHJobSubmission {
       if (isset($vals['sshPort'])) {
         $this->sshPort = $vals['sshPort'];
       }
+      if (isset($vals['monitorMode'])) {
+        $this->monitorMode = $vals['monitorMode'];
+      }
     }
   }
 
@@ -1167,6 +1155,13 @@ class SSHJobSubmission {
             $xfer += $input->skip($ftype);
           }
           break;
+        case 6:
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->monitorMode);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -1208,6 +1203,11 @@ class SSHJobSubmission {
       $xfer += $output->writeI32($this->sshPort);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->monitorMode !== null) {
+      $xfer += $output->writeFieldBegin('monitorMode', TType::I32, 6);
+      $xfer += $output->writeI32($this->monitorMode);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
index 30528b7..2545711 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
@@ -31,17 +31,17 @@ import org.apache.thrift.TEnum;
 /**
  * Monitoring modes
  * 
- * PUSH:
- * Server will push job status changes.
+ * POLL_JOB_MANAGER:
+ * GFac needs to poll the job manager for job status changes.
  * 
- * PULL:
- * Need to pull and get the Job status changes.
+ * XSEDE_AMQP_SUBSCRIBE:
+ * The server will publish job status changes to the AMQP server.
  * 
  * 
  */
 @SuppressWarnings("all") public enum MonitorMode implements org.apache.thrift.TEnum {
-  PUSH(0),
-  PULL(1);
+  POLL_JOB_MANAGER(0),
+  XSEDE_AMQP_SUBSCRIBE(1);
 
   private final int value;
 
@@ -63,9 +63,9 @@ import org.apache.thrift.TEnum;
   public static MonitorMode findByValue(int value) { 
     switch (value) {
       case 0:
-        return PUSH;
+        return POLL_JOB_MANAGER;
       case 1:
-        return PULL;
+        return XSEDE_AMQP_SUBSCRIBE;
       default:
         return null;
     }
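
The wire values stay 0 and 1; only the constant names change (PUSH becomes POLL_JOB_MANAGER, PULL becomes XSEDE_AMQP_SUBSCRIBE). A tiny illustrative check (not from the commit), using the findByValue accessor shown above and the standard Thrift TEnum getValue():

import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;

public class MonitorModeValues {
    public static void main(String[] args) {
        System.out.println(MonitorMode.findByValue(0)); // POLL_JOB_MANAGER (formerly PUSH)
        System.out.println(MonitorMode.findByValue(1)); // XSEDE_AMQP_SUBSCRIBE (formerly PULL)
        System.out.println(MonitorMode.POLL_JOB_MANAGER.getValue()); // 0
    }
}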

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
index d0487b1..680a40a 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
@@ -74,7 +74,6 @@ import org.slf4j.LoggerFactory;
   private static final org.apache.thrift.protocol.TField PUSH_MONITORING_ENDPOINT_FIELD_DESC = new org.apache.thrift.protocol.TField("pushMonitoringEndpoint", org.apache.thrift.protocol.TType.STRING, (short)3);
   private static final org.apache.thrift.protocol.TField JOB_MANAGER_BIN_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("jobManagerBinPath", org.apache.thrift.protocol.TType.STRING, (short)4);
   private static final org.apache.thrift.protocol.TField JOB_MANAGER_COMMANDS_FIELD_DESC = new org.apache.thrift.protocol.TField("jobManagerCommands", org.apache.thrift.protocol.TType.MAP, (short)5);
-  private static final org.apache.thrift.protocol.TField MONITOR_MODE_FIELD_DESC = new org.apache.thrift.protocol.TField("monitorMode", org.apache.thrift.protocol.TType.I32, (short)6);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -87,7 +86,6 @@ import org.slf4j.LoggerFactory;
   private String pushMonitoringEndpoint; // optional
   private String jobManagerBinPath; // optional
   private Map<JobManagerCommand,String> jobManagerCommands; // optional
-  private MonitorMode monitorMode; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -99,12 +97,7 @@ import org.slf4j.LoggerFactory;
     RESOURCE_JOB_MANAGER_TYPE((short)2, "resourceJobManagerType"),
     PUSH_MONITORING_ENDPOINT((short)3, "pushMonitoringEndpoint"),
     JOB_MANAGER_BIN_PATH((short)4, "jobManagerBinPath"),
-    JOB_MANAGER_COMMANDS((short)5, "jobManagerCommands"),
-    /**
-     * 
-     * @see MonitorMode
-     */
-    MONITOR_MODE((short)6, "monitorMode");
+    JOB_MANAGER_COMMANDS((short)5, "jobManagerCommands");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -129,8 +122,6 @@ import org.slf4j.LoggerFactory;
           return JOB_MANAGER_BIN_PATH;
         case 5: // JOB_MANAGER_COMMANDS
           return JOB_MANAGER_COMMANDS;
-        case 6: // MONITOR_MODE
-          return MONITOR_MODE;
         default:
           return null;
       }
@@ -171,7 +162,7 @@ import org.slf4j.LoggerFactory;
   }
 
   // isset id assignments
-  private _Fields optionals[] = {_Fields.PUSH_MONITORING_ENDPOINT,_Fields.JOB_MANAGER_BIN_PATH,_Fields.JOB_MANAGER_COMMANDS,_Fields.MONITOR_MODE};
+  private _Fields optionals[] = {_Fields.PUSH_MONITORING_ENDPOINT,_Fields.JOB_MANAGER_BIN_PATH,_Fields.JOB_MANAGER_COMMANDS};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -187,8 +178,6 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
             new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, JobManagerCommand.class), 
             new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
-    tmpMap.put(_Fields.MONITOR_MODE, new org.apache.thrift.meta_data.FieldMetaData("monitorMode", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, MonitorMode.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ResourceJobManager.class, metaDataMap);
   }
@@ -238,9 +227,6 @@ import org.slf4j.LoggerFactory;
       }
       this.jobManagerCommands = __this__jobManagerCommands;
     }
-    if (other.isSetMonitorMode()) {
-      this.monitorMode = other.monitorMode;
-    }
   }
 
   public ResourceJobManager deepCopy() {
@@ -255,7 +241,6 @@ import org.slf4j.LoggerFactory;
     this.pushMonitoringEndpoint = null;
     this.jobManagerBinPath = null;
     this.jobManagerCommands = null;
-    this.monitorMode = null;
   }
 
   public String getResourceJobManagerId() {
@@ -392,37 +377,6 @@ import org.slf4j.LoggerFactory;
     }
   }
 
-  /**
-   * 
-   * @see MonitorMode
-   */
-  public MonitorMode getMonitorMode() {
-    return this.monitorMode;
-  }
-
-  /**
-   * 
-   * @see MonitorMode
-   */
-  public void setMonitorMode(MonitorMode monitorMode) {
-    this.monitorMode = monitorMode;
-  }
-
-  public void unsetMonitorMode() {
-    this.monitorMode = null;
-  }
-
-  /** Returns true if field monitorMode is set (has been assigned a value) and false otherwise */
-  public boolean isSetMonitorMode() {
-    return this.monitorMode != null;
-  }
-
-  public void setMonitorModeIsSet(boolean value) {
-    if (!value) {
-      this.monitorMode = null;
-    }
-  }
-
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case RESOURCE_JOB_MANAGER_ID:
@@ -465,14 +419,6 @@ import org.slf4j.LoggerFactory;
       }
       break;
 
-    case MONITOR_MODE:
-      if (value == null) {
-        unsetMonitorMode();
-      } else {
-        setMonitorMode((MonitorMode)value);
-      }
-      break;
-
     }
   }
 
@@ -493,9 +439,6 @@ import org.slf4j.LoggerFactory;
     case JOB_MANAGER_COMMANDS:
       return getJobManagerCommands();
 
-    case MONITOR_MODE:
-      return getMonitorMode();
-
     }
     throw new IllegalStateException();
   }
@@ -517,8 +460,6 @@ import org.slf4j.LoggerFactory;
       return isSetJobManagerBinPath();
     case JOB_MANAGER_COMMANDS:
       return isSetJobManagerCommands();
-    case MONITOR_MODE:
-      return isSetMonitorMode();
     }
     throw new IllegalStateException();
   }
@@ -581,15 +522,6 @@ import org.slf4j.LoggerFactory;
         return false;
     }
 
-    boolean this_present_monitorMode = true && this.isSetMonitorMode();
-    boolean that_present_monitorMode = true && that.isSetMonitorMode();
-    if (this_present_monitorMode || that_present_monitorMode) {
-      if (!(this_present_monitorMode && that_present_monitorMode))
-        return false;
-      if (!this.monitorMode.equals(that.monitorMode))
-        return false;
-    }
-
     return true;
   }
 
@@ -656,16 +588,6 @@ import org.slf4j.LoggerFactory;
         return lastComparison;
       }
     }
-    lastComparison = Boolean.valueOf(isSetMonitorMode()).compareTo(other.isSetMonitorMode());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetMonitorMode()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.monitorMode, other.monitorMode);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
     return 0;
   }
 
@@ -731,16 +653,6 @@ import org.slf4j.LoggerFactory;
       }
       first = false;
     }
-    if (isSetMonitorMode()) {
-      if (!first) sb.append(", ");
-      sb.append("monitorMode:");
-      if (this.monitorMode == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.monitorMode);
-      }
-      first = false;
-    }
     sb.append(")");
     return sb.toString();
   }
@@ -844,14 +756,6 @@ import org.slf4j.LoggerFactory;
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 6: // MONITOR_MODE
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
-              struct.setMonitorModeIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -904,13 +808,6 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldEnd();
         }
       }
-      if (struct.monitorMode != null) {
-        if (struct.isSetMonitorMode()) {
-          oprot.writeFieldBegin(MONITOR_MODE_FIELD_DESC);
-          oprot.writeI32(struct.monitorMode.getValue());
-          oprot.writeFieldEnd();
-        }
-      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -940,10 +837,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetJobManagerCommands()) {
         optionals.set(2);
       }
-      if (struct.isSetMonitorMode()) {
-        optionals.set(3);
-      }
-      oprot.writeBitSet(optionals, 4);
+      oprot.writeBitSet(optionals, 3);
       if (struct.isSetPushMonitoringEndpoint()) {
         oprot.writeString(struct.pushMonitoringEndpoint);
       }
@@ -960,9 +854,6 @@ import org.slf4j.LoggerFactory;
           }
         }
       }
-      if (struct.isSetMonitorMode()) {
-        oprot.writeI32(struct.monitorMode.getValue());
-      }
     }
 
     @Override
@@ -972,7 +863,7 @@ import org.slf4j.LoggerFactory;
       struct.setResourceJobManagerIdIsSet(true);
       struct.resourceJobManagerType = ResourceJobManagerType.findByValue(iprot.readI32());
       struct.setResourceJobManagerTypeIsSet(true);
-      BitSet incoming = iprot.readBitSet(4);
+      BitSet incoming = iprot.readBitSet(3);
       if (incoming.get(0)) {
         struct.pushMonitoringEndpoint = iprot.readString();
         struct.setPushMonitoringEndpointIsSet(true);
@@ -996,10 +887,6 @@ import org.slf4j.LoggerFactory;
         }
         struct.setJobManagerCommandsIsSet(true);
       }
-      if (incoming.get(3)) {
-        struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
-        struct.setMonitorModeIsSet(true);
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/SSHJobSubmission.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/SSHJobSubmission.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/SSHJobSubmission.java
index ef786de..4c19d31 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/SSHJobSubmission.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/SSHJobSubmission.java
@@ -66,6 +66,7 @@ import org.slf4j.LoggerFactory;
   private static final org.apache.thrift.protocol.TField RESOURCE_JOB_MANAGER_FIELD_DESC = new org.apache.thrift.protocol.TField("resourceJobManager", org.apache.thrift.protocol.TType.STRUCT, (short)3);
   private static final org.apache.thrift.protocol.TField ALTERNATIVE_SSHHOST_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("alternativeSSHHostName", org.apache.thrift.protocol.TType.STRING, (short)4);
   private static final org.apache.thrift.protocol.TField SSH_PORT_FIELD_DESC = new org.apache.thrift.protocol.TField("sshPort", org.apache.thrift.protocol.TType.I32, (short)5);
+  private static final org.apache.thrift.protocol.TField MONITOR_MODE_FIELD_DESC = new org.apache.thrift.protocol.TField("monitorMode", org.apache.thrift.protocol.TType.I32, (short)6);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -78,6 +79,7 @@ import org.slf4j.LoggerFactory;
   private ResourceJobManager resourceJobManager; // required
   private String alternativeSSHHostName; // optional
   private int sshPort; // optional
+  private MonitorMode monitorMode; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -89,7 +91,12 @@ import org.slf4j.LoggerFactory;
     SECURITY_PROTOCOL((short)2, "securityProtocol"),
     RESOURCE_JOB_MANAGER((short)3, "resourceJobManager"),
     ALTERNATIVE_SSHHOST_NAME((short)4, "alternativeSSHHostName"),
-    SSH_PORT((short)5, "sshPort");
+    SSH_PORT((short)5, "sshPort"),
+    /**
+     * 
+     * @see MonitorMode
+     */
+    MONITOR_MODE((short)6, "monitorMode");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -114,6 +121,8 @@ import org.slf4j.LoggerFactory;
           return ALTERNATIVE_SSHHOST_NAME;
         case 5: // SSH_PORT
           return SSH_PORT;
+        case 6: // MONITOR_MODE
+          return MONITOR_MODE;
         default:
           return null;
       }
@@ -156,7 +165,7 @@ import org.slf4j.LoggerFactory;
   // isset id assignments
   private static final int __SSHPORT_ISSET_ID = 0;
   private byte __isset_bitfield = 0;
-  private _Fields optionals[] = {_Fields.ALTERNATIVE_SSHHOST_NAME,_Fields.SSH_PORT};
+  private _Fields optionals[] = {_Fields.ALTERNATIVE_SSHHOST_NAME,_Fields.SSH_PORT,_Fields.MONITOR_MODE};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -170,6 +179,8 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.SSH_PORT, new org.apache.thrift.meta_data.FieldMetaData("sshPort", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    tmpMap.put(_Fields.MONITOR_MODE, new org.apache.thrift.meta_data.FieldMetaData("monitorMode", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, MonitorMode.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(SSHJobSubmission.class, metaDataMap);
   }
@@ -210,6 +221,9 @@ import org.slf4j.LoggerFactory;
       this.alternativeSSHHostName = other.alternativeSSHHostName;
     }
     this.sshPort = other.sshPort;
+    if (other.isSetMonitorMode()) {
+      this.monitorMode = other.monitorMode;
+    }
   }
 
   public SSHJobSubmission deepCopy() {
@@ -225,6 +239,7 @@ import org.slf4j.LoggerFactory;
     this.alternativeSSHHostName = null;
     this.sshPort = 22;
 
+    this.monitorMode = null;
   }
 
   public String getJobSubmissionInterfaceId() {
@@ -349,6 +364,37 @@ import org.slf4j.LoggerFactory;
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SSHPORT_ISSET_ID, value);
   }
 
+  /**
+   * 
+   * @see MonitorMode
+   */
+  public MonitorMode getMonitorMode() {
+    return this.monitorMode;
+  }
+
+  /**
+   * 
+   * @see MonitorMode
+   */
+  public void setMonitorMode(MonitorMode monitorMode) {
+    this.monitorMode = monitorMode;
+  }
+
+  public void unsetMonitorMode() {
+    this.monitorMode = null;
+  }
+
+  /** Returns true if field monitorMode is set (has been assigned a value) and false otherwise */
+  public boolean isSetMonitorMode() {
+    return this.monitorMode != null;
+  }
+
+  public void setMonitorModeIsSet(boolean value) {
+    if (!value) {
+      this.monitorMode = null;
+    }
+  }
+
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case JOB_SUBMISSION_INTERFACE_ID:
@@ -391,6 +437,14 @@ import org.slf4j.LoggerFactory;
       }
       break;
 
+    case MONITOR_MODE:
+      if (value == null) {
+        unsetMonitorMode();
+      } else {
+        setMonitorMode((MonitorMode)value);
+      }
+      break;
+
     }
   }
 
@@ -411,6 +465,9 @@ import org.slf4j.LoggerFactory;
     case SSH_PORT:
       return Integer.valueOf(getSshPort());
 
+    case MONITOR_MODE:
+      return getMonitorMode();
+
     }
     throw new IllegalStateException();
   }
@@ -432,6 +489,8 @@ import org.slf4j.LoggerFactory;
       return isSetAlternativeSSHHostName();
     case SSH_PORT:
       return isSetSshPort();
+    case MONITOR_MODE:
+      return isSetMonitorMode();
     }
     throw new IllegalStateException();
   }
@@ -494,6 +553,15 @@ import org.slf4j.LoggerFactory;
         return false;
     }
 
+    boolean this_present_monitorMode = true && this.isSetMonitorMode();
+    boolean that_present_monitorMode = true && that.isSetMonitorMode();
+    if (this_present_monitorMode || that_present_monitorMode) {
+      if (!(this_present_monitorMode && that_present_monitorMode))
+        return false;
+      if (!this.monitorMode.equals(that.monitorMode))
+        return false;
+    }
+
     return true;
   }
 
@@ -560,6 +628,16 @@ import org.slf4j.LoggerFactory;
         return lastComparison;
       }
     }
+    lastComparison = Boolean.valueOf(isSetMonitorMode()).compareTo(other.isSetMonitorMode());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMonitorMode()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.monitorMode, other.monitorMode);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     return 0;
   }
 
@@ -619,6 +697,16 @@ import org.slf4j.LoggerFactory;
       sb.append(this.sshPort);
       first = false;
     }
+    if (isSetMonitorMode()) {
+      if (!first) sb.append(", ");
+      sb.append("monitorMode:");
+      if (this.monitorMode == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.monitorMode);
+      }
+      first = false;
+    }
     sb.append(")");
     return sb.toString();
   }
@@ -720,6 +808,14 @@ import org.slf4j.LoggerFactory;
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
+          case 6: // MONITOR_MODE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
+              struct.setMonitorModeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -760,6 +856,13 @@ import org.slf4j.LoggerFactory;
         oprot.writeI32(struct.sshPort);
         oprot.writeFieldEnd();
       }
+      if (struct.monitorMode != null) {
+        if (struct.isSetMonitorMode()) {
+          oprot.writeFieldBegin(MONITOR_MODE_FIELD_DESC);
+          oprot.writeI32(struct.monitorMode.getValue());
+          oprot.writeFieldEnd();
+        }
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -787,13 +890,19 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetSshPort()) {
         optionals.set(1);
       }
-      oprot.writeBitSet(optionals, 2);
+      if (struct.isSetMonitorMode()) {
+        optionals.set(2);
+      }
+      oprot.writeBitSet(optionals, 3);
       if (struct.isSetAlternativeSSHHostName()) {
         oprot.writeString(struct.alternativeSSHHostName);
       }
       if (struct.isSetSshPort()) {
         oprot.writeI32(struct.sshPort);
       }
+      if (struct.isSetMonitorMode()) {
+        oprot.writeI32(struct.monitorMode.getValue());
+      }
     }
 
     @Override
@@ -806,7 +915,7 @@ import org.slf4j.LoggerFactory;
       struct.resourceJobManager = new ResourceJobManager();
       struct.resourceJobManager.read(iprot);
       struct.setResourceJobManagerIsSet(true);
-      BitSet incoming = iprot.readBitSet(2);
+      BitSet incoming = iprot.readBitSet(3);
       if (incoming.get(0)) {
         struct.alternativeSSHHostName = iprot.readString();
         struct.setAlternativeSSHHostNameIsSet(true);
@@ -815,6 +924,10 @@ import org.slf4j.LoggerFactory;
         struct.sshPort = iprot.readI32();
         struct.setSshPortIsSet(true);
       }
+      if (incoming.get(2)) {
+        struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
+        struct.setMonitorModeIsSet(true);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/e28919c9/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
index 80a70df..3d0472d 100644
--- a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
@@ -82,20 +82,6 @@ enum JobManagerCommand {
     SHOW_START
 }
 
-/**
-* Monitoring modes
-*
-* PUSH:
-* Server will push job status changes.
-*
-* PULL:
-* Need to pull and get the Job status changes.
-*
-**/
-enum MonitorMode {
-   PUSH,
-   PULL
-}
 
 /**
  * Resource Job Manager Information
@@ -119,8 +105,7 @@ struct ResourceJobManager {
     2: required ResourceJobManagerType resourceJobManagerType,
     3: optional string pushMonitoringEndpoint,
     4: optional string jobManagerBinPath,
-    5: optional map<JobManagerCommand, string> jobManagerCommands,
-    6: optional MonitorMode monitorMode
+    5: optional map<JobManagerCommand, string> jobManagerCommands
 }
 
 /**
@@ -206,6 +191,21 @@ enum JobSubmissionProtocol {
 }
 
 /**
+* Monitoring modes
+*
+* POLL_JOB_MANAGER:
+* GFac needs to pull job status changes.
+*
+* XSEDE_AMQP_SUBSCRIBE:
+* Server will publish job status changes to the AMQP server.
+*
+**/
+enum MonitorMode {
+   POLL_JOB_MANAGER,
+   XSEDE_AMQP_SUBSCRIBE
+}
+
+/**
  * Enumeration of data movement supported by Airavata
  *
  * SCP:
@@ -313,7 +313,8 @@ struct SSHJobSubmission {
     2: required SecurityProtocol securityProtocol,
     3: required ResourceJobManager resourceJobManager,
     4: optional string alternativeSSHHostName,
-    5: optional i32 sshPort = 22
+    5: optional i32 sshPort = 22,
+    6: optional MonitorMode monitorMode
 }
 
 struct GlobusJobSubmission {
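
Taken together, the e28919c9 hunks move monitorMode off ResourceJobManager and onto SSHJobSubmission as optional field 6, with the enum values renamed to POLL_JOB_MANAGER and XSEDE_AMQP_SUBSCRIBE. Below is a minimal sketch of populating the regenerated beans; it assumes the usual Thrift-generated setters and that MonitorMode sits in the same computeresource package as the other generated classes.

import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;

public class MonitorModeSketch {
    public static void main(String[] args) {
        // After this change, ResourceJobManager no longer carries a monitor mode.
        ResourceJobManager resourceJobManager = new ResourceJobManager();
        resourceJobManager.setResourceJobManagerType(ResourceJobManagerType.FORK);

        // The monitor mode now lives on the job submission interface itself.
        SSHJobSubmission sshJobSubmission = new SSHJobSubmission();
        sshJobSubmission.setResourceJobManager(resourceJobManager);
        sshJobSubmission.setSshPort(22);
        sshJobSubmission.setMonitorMode(MonitorMode.POLL_JOB_MANAGER); // optional field 6

        System.out.println("monitorMode set: " + sshJobSubmission.isSetMonitorMode()); // prints true
    }
}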


[40/50] [abbrv] airavata git commit: Merged master branch

Posted by ch...@apache.org.
Merged master branch


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/a3351b70
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/a3351b70
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/a3351b70

Branch: refs/heads/master
Commit: a3351b7000384a1a28f969fa5d0473d238c1cbcc
Parents: b52499e f7c25ca
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 19:03:13 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 19:03:13 2014 -0500

----------------------------------------------------------------------
 airavata-api/airavata-api-server/pom.xml        |    2 +-
 .../airavata/api/server/WorkflowServer.java     |  164 -
 .../server/handler/AiravataServerHandler.java   |  357 +-
 .../server/handler/WorkflowServerHandler.java   |  158 -
 .../AiravataExperimentStatusUpdator.java        |    7 +-
 .../java/org/apache/airavata/api/Workflow.java  | 8191 ------------------
 .../api/client/AiravataClientFactory.java       |   30 +-
 .../apache/airavata/api/workflow/Workflow.java  | 8191 ------------------
 .../api/workflow/workflowAPIConstants.java      |   56 -
 modules/distribution/api-server/pom.xml         |  203 +
 .../src/main/assembly/bin-assembly.xml          |  137 +
 .../src/main/assembly/src-assembly.xml          |   74 +
 .../api-server/src/main/resources/INSTALL       |   55 +
 .../api-server/src/main/resources/LICENSE       | 2387 +++++
 .../api-server/src/main/resources/NOTICE        |  163 +
 .../api-server/src/main/resources/README        |  121 +
 .../src/main/resources/bin/api-server.sh        |  118 +
 .../api-server/src/main/resources/bin/logo.txt  |   34 +
 modules/distribution/gfac-server/pom.xml        |  486 ++
 .../src/main/assembly/bin-assembly.xml          |  305 +
 .../src/main/assembly/src-assembly.xml          |   75 +
 .../gfac-server/src/main/resources/INSTALL      |   55 +
 .../gfac-server/src/main/resources/LICENSE      | 2387 +++++
 .../gfac-server/src/main/resources/NOTICE       |  163 +
 .../gfac-server/src/main/resources/README       |  121 +
 .../src/main/resources/bin/gfac-server.sh       |  118 +
 .../gfac-server/src/main/resources/bin/logo.txt |   34 +
 .../distribution/orchestrator-server/pom.xml    |  325 +
 .../src/main/assembly/bin-assembly.xml          |  230 +
 .../src/main/assembly/src-assembly.xml          |   75 +
 .../src/main/resources/INSTALL                  |   55 +
 .../src/main/resources/LICENSE                  | 2387 +++++
 .../src/main/resources/NOTICE                   |  163 +
 .../src/main/resources/README                   |  121 +
 .../src/main/resources/bin/logo.txt             |   34 +
 .../main/resources/bin/orchestrator-server.sh   |  118 +
 modules/distribution/pom.xml                    |    5 +-
 modules/distribution/server/pom.xml             |   22 +-
 .../server/src/main/assembly/bin-assembly.xml   |    3 +
 modules/gfac/airavata-gfac-service/pom.xml      |    5 +
 modules/gfac/airavata-gfac-stubs/pom.xml        |   60 +
 .../airavata/gfac/client/GFACInstance.java      |   62 +
 .../airavata/gfac/client/GFacClientFactory.java |   42 +
 .../apache/airavata/gfac/cpi/GfacService.java   | 2867 ++++++
 .../gfac/cpi/gfac_cpi_serviceConstants.java     |   55 +
 .../security/TokenizedMyProxyAuthInfo.java      |    2 +-
 .../monitor/impl/pull/qstat/HPCPullMonitor.java |    6 +-
 modules/gfac/pom.xml                            |    1 +
 .../airavata-orchestrator-service/pom.xml       |   10 +
 .../client/OrchestratorClientFactory.java       |   45 -
 .../orchestrator/cpi/OrchestratorService.java   | 4467 ----------
 .../cpi/orchestrator_cpi_serviceConstants.java  |   55 -
 .../server/OrchestratorServerHandler.java       |  180 +-
 .../orchestrator/util/DataModelUtils.java       |   53 +
 .../util/OrchestratorRecoveryHandler.java       |    2 +-
 .../OrchestratorServerThreadPoolExecutor.java   |   35 +
 .../airavata-orchestrator-stubs/pom.xml         |   60 +
 .../client/OrchestratorClientFactory.java       |   44 +
 .../orchestrator/cpi/OrchestratorService.java   | 4568 ++++++++++
 .../cpi/orchestrator_cpi_serviceConstants.java  |   55 +
 .../sample/OrchestratorClientSample.java        |  134 +
 .../orchestrator-client-sdks/pom.xml            |   70 -
 .../client/sample/OrchestratorClientSample.java |  138 -
 modules/orchestrator/orchestrator-core/pom.xml  |    7 +-
 .../core/context/OrchestratorContext.java       |    3 +-
 .../orchestrator/core/gfac/GFACInstance.java    |   62 -
 .../core/gfac/GFacClientFactory.java            |   42 -
 .../core/impl/GFACEmbeddedJobSubmitter.java     |    6 +-
 .../core/impl/GFACServiceJobSubmitter.java      |    6 +-
 .../orchestrator/core/job/JobSubmitter.java     |    3 +-
 .../orchestrator/core/BaseOrchestratorTest.java |    9 -
 .../orchestrator.cpi.service.thrift             |    2 +-
 modules/orchestrator/pom.xml                    |    2 +-
 modules/workflow-model/workflow-engine/pom.xml  |    7 +-
 .../workflow/engine/WorkflowEngineImpl.java     |    5 +-
 modules/xbaya-gui/pom.xml                       |    7 +-
 .../ui/dialogs/registry/RegistryWindow.java     |    4 +-
 .../dialogs/workflow/WorkflowImportWindow.java  |    4 +-
 .../RegistryWorkflowPublisherWindow.java        |    4 +-
 .../WorkflowInterpreterLaunchWindow.java        |    9 +-
 .../apache/airavata/xbaya/util/XBayaUtil.java   |    4 +-
 81 files changed, 18919 insertions(+), 22013 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/a3351b70/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/airavata/blob/a3351b70/modules/gfac/pom.xml
----------------------------------------------------------------------
diff --cc modules/gfac/pom.xml
index dd38b6f,6698d31..34253b9
--- a/modules/gfac/pom.xml
+++ b/modules/gfac/pom.xml
@@@ -35,12 -35,13 +35,13 @@@
                  <module>gfac-ec2</module>
                  <module>gfac-ssh</module>
                  <module>gfac-local</module>
 -                <module>gfac-hadoop</module>
 -                <module>gfac-gram</module>
 +                <!--<module>gfac-hadoop</module>-->
 +                <!--<module>gfac-gram</module>-->
                  <module>gfac-gsissh</module>
 -                <module>gfac-bes</module>
 +                <!--<module>gfac-bes</module>-->
                  <module>gfac-monitor</module>
                  <module>airavata-gfac-service</module>
+                 <module>airavata-gfac-stubs</module>
              </modules>
          </profile>
      </profiles>


[27/50] [abbrv] airavata git commit: Changed job submission and data movement protocols to their enums instead of String, which would cause an NPE with null values

Posted by ch...@apache.org.
Changed job submission and data movement protocols to their enums instead of String, which would cause an NPE with null values


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/3eea1a30
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/3eea1a30
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/3eea1a30

Branch: refs/heads/master
Commit: 3eea1a3095f735f7db3b37e65f92b653f0f28f03
Parents: ce1354e
Author: shamrath <sh...@gmail.com>
Authored: Wed Nov 5 15:05:37 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Wed Nov 5 15:05:37 2014 -0500

----------------------------------------------------------------------
 .../client/tools/RegisterSampleApplicationsUtils.java        | 7 ++++---
 .../airavata/integration/tools/DocumentCreatorUtils.java     | 8 ++++----
 2 files changed, 8 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/3eea1a30/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
index 24c0688..184f3e9 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplicationsUtils.java
@@ -41,15 +41,16 @@ public class RegisterSampleApplicationsUtils {
     public static ComputeResourcePreference
         createComputeResourcePreference(String computeResourceId, String allocationProjectNumber,
                                         boolean overridebyAiravata, String preferredBatchQueue,
-                                        String preferredJobSubmissionProtocol,String preferredDataMovementProtocol,
+                                        JobSubmissionProtocol preferredJobSubmissionProtocol,
+                                        DataMovementProtocol preferredDataMovementProtocol,
                                         String scratchLocation) {
         ComputeResourcePreference computeResourcePreference = new ComputeResourcePreference();
         computeResourcePreference.setComputeResourceId(computeResourceId);
         computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
         computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
         computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
-        computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(preferredDataMovementProtocol));
-        computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(preferredJobSubmissionProtocol));
+        computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
+        computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
         computeResourcePreference.setScratchLocation(scratchLocation);
         return computeResourcePreference;
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/3eea1a30/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
----------------------------------------------------------------------
diff --git a/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java b/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
index 2fc3ac2..a49d22f 100644
--- a/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
+++ b/modules/integration-tests/src/test/java/org/apache/airavata/integration/tools/DocumentCreatorUtils.java
@@ -45,15 +45,15 @@ public class DocumentCreatorUtils {
 
 	public static ComputeResourcePreference createComputeResourcePreference(String computeResourceId, String scratchLocation,
 			String allocationProjectNumber, boolean overridebyAiravata,
-			String preferredBatchQueue, String preferredJobSubmissionProtocol,
-			String preferredDataMovementProtocol) throws AppCatalogException {
+			String preferredBatchQueue, JobSubmissionProtocol preferredJobSubmissionProtocol,
+			DataMovementProtocol preferredDataMovementProtocol) throws AppCatalogException {
 		ComputeResourcePreference computeResourcePreference = new ComputeResourcePreference();
 		computeResourcePreference.setComputeResourceId(computeResourceId);
 		computeResourcePreference.setOverridebyAiravata(overridebyAiravata);
 		computeResourcePreference.setAllocationProjectNumber(allocationProjectNumber);
 		computeResourcePreference.setPreferredBatchQueue(preferredBatchQueue);
-		computeResourcePreference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(preferredDataMovementProtocol));
-		computeResourcePreference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(preferredJobSubmissionProtocol));
+		computeResourcePreference.setPreferredDataMovementProtocol(preferredDataMovementProtocol);
+		computeResourcePreference.setPreferredJobSubmissionProtocol(preferredJobSubmissionProtocol);
 		computeResourcePreference.setScratchLocation(scratchLocation);
 		return computeResourcePreference;
 	}
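
With the signature change above, callers hand createComputeResourcePreference the JobSubmissionProtocol and DataMovementProtocol constants directly, so no String-to-enum valueOf() call remains to throw an NPE on a null value. A short sketch of filling in a preference with the setters shown in the diff; the resource id, project number, queue, and scratch path are placeholders, the enum package is assumed to match the other computeresource classes, and SSH/SCP are assumed enum constants.

import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;

public class PreferenceSketch {
    public static void main(String[] args) {
        ComputeResourcePreference preference = new ComputeResourcePreference();
        preference.setComputeResourceId("example-resource-id");  // placeholder
        preference.setAllocationProjectNumber("TG-EXAMPLE");     // placeholder
        preference.setOverridebyAiravata(false);
        preference.setPreferredBatchQueue("normal");             // placeholder
        // Enums are passed straight through; nothing left to fail on a null string.
        preference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
        preference.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);
        preference.setScratchLocation("/scratch/example");       // placeholder
        System.out.println(preference);
    }
}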


[50/50] [abbrv] airavata git commit: AIRAVATA-1511

Posted by ch...@apache.org.
AIRAVATA-1511


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/c36ab241
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/c36ab241
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/c36ab241

Branch: refs/heads/master
Commit: c36ab24191a5e1d549a745c7456a056e889bd156
Parents: 65ad586
Author: Chathuri Wimalasena <ka...@gmail.com>
Authored: Tue Nov 11 14:08:48 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Tue Nov 11 14:08:48 2014 -0500

----------------------------------------------------------------------
 .../apache/airavata/client/samples/CreateLaunchExperiment.java | 4 ++--
 .../airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java  | 6 ++----
 2 files changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/c36ab241/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index dbb4a0c..2d0f4fa 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -58,7 +58,7 @@ public class CreateLaunchExperiment {
     private static final String DEFAULT_GATEWAY = "default.registry.gateway";
     private static Airavata.Client airavataClient;
 
-    private static String echoAppId = "Echo_636b4530-6fb2-4c9e-998a-b41e648aa70f";
+    private static String echoAppId = "Echo_7b3cfedd-efe4-4a6f-8201-9ee1891d8ced";
     private static String wrfAppId = "WRF_d41bdc86-e280-4eb6-a045-708f69a8c116";
     private static String amberAppId = "Amber_b23ee051-90d6-4892-827e-622a2f6c95ee";
 
@@ -75,7 +75,7 @@ public class CreateLaunchExperiment {
     public static void main(String[] args) throws Exception {
                 airavataClient = AiravataClientFactory.createAiravataClient(THRIFT_SERVER_HOST, THRIFT_SERVER_PORT);
                 System.out.println("API version is " + airavataClient.getAPIVersion());
-//                registerApplications(); // run this only the first time
+//                registerApplications(); // run this only the first time
                 createAndLaunchExp();
     }
     

http://git-wip-us.apache.org/repos/asf/airavata/blob/c36ab241/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
index 171ca07..824ab71 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/HPCPullMonitor.java
@@ -250,8 +250,7 @@ public class HPCPullMonitor extends PullMonitor {
                         MonitorID iMonitorID = iterator.next();
                         if (iMonitorID.getFailedCount() > FAILED_COUNT) {
                             iMonitorID.setLastMonitored(new Timestamp((new Date()).getTime()));
-                            String outputDir = iMonitorID.getJobExecutionContext().getApplicationContext()
-                                    .getApplicationDeploymentDescription().getType().getOutputDataDirectory();
+                            String outputDir = iMonitorID.getJobExecutionContext().getOutputDir();
                             List<String> stdOut = null;
                             try {
                                 stdOut = connection.getCluster().listDirectory(outputDir); // check the outputs directory
@@ -299,8 +298,7 @@ public class HPCPullMonitor extends PullMonitor {
                 HostMonitorData iHostMonitorID = iterator1.next();
                 if (iHostMonitorID.getMonitorIDs().size() == 0) {
                     iterator1.remove();
-                    logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getHost()
-                            .getType().getHostAddress());
+                    logger.debug("Removed host {} from monitoring queue", iHostMonitorID.getComputeResourceDescription().getHostName());
                 }
             }
             if(take.getHostMonitorData().size()!=0) {


[30/50] [abbrv] airavata git commit: Fixed an NPE in the security context lookup and replaced ActualParameter objects with DataObjectType objects

Posted by ch...@apache.org.
Fixed an NPE in the security context lookup and replaced ActualParameter objects with DataObjectType objects


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/95354eab
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/95354eab
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/95354eab

Branch: refs/heads/master
Commit: 95354eab81fdfe4f2d17c5c7f8f7a2deaf9a2b21
Parents: 38bc592
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 15:04:21 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 15:04:21 2014 -0500

----------------------------------------------------------------------
 .../gfac/core/context/JobExecutionContext.java  |  2 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java | 66 ++++++++++----------
 2 files changed, 33 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/95354eab/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index a9d1bb4..ff764a0 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -279,7 +279,7 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     }
 
 	public SecurityContext getSecurityContext(String name) throws GFacException{
-		SecurityContext secContext = securityContext.get(name+"-"+this.getHostName());
+		SecurityContext secContext = securityContext.get(name);
 		return secContext;
 	}
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/95354eab/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index b882be6..d9e669d 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@ -21,9 +21,6 @@
 package org.apache.airavata.gfac.gsissh.handler;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
-import org.apache.airavata.common.utils.StringUtil;
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
@@ -34,13 +31,13 @@ import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
+import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
+import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.TransferState;
 import org.apache.airavata.model.workspace.experiment.TransferStatus;
 import org.apache.airavata.registry.cpi.ChildDataType;
-import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.URIParameterType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -116,48 +113,49 @@ public class GSISSHInputHandler extends AbstractRecoverableHandler {
             MessageContext input = jobExecutionContext.getInMessageContext();
             Set<String> parameters = input.getParameters().keySet();
             for (String paramName : parameters) {
-                ActualParameter actualParameter = (ActualParameter) input.getParameters().get(paramName);
-                String paramValue = MappingFactory.toString(actualParameter);
+                DataObjectType inputParamType = (DataObjectType) input.getParameters().get(paramName);
+                String paramValue = inputParamType.getKey();
                 //TODO: Review this with type
-                if ("URI".equals(actualParameter.getType().getType().toString())) {
+                if (inputParamType.getType() == DataType.URI) {
                     if (index < oldIndex) {
                         log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                        ((URIParameterType) actualParameter.getType()).setValue(oldFiles.get(index));
+                        inputParamType.setValue(oldFiles.get(index));
                         data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
                     } else {
                         String stageInputFile = stageInputFiles(cluster, jobExecutionContext, paramValue);
-                        ((URIParameterType) actualParameter.getType()).setValue(stageInputFile);
+                        inputParamType.setValue(stageInputFile);
                         StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
                         status.setTransferState(TransferState.UPLOAD);
                         detail.setTransferStatus(status);
                         detail.setTransferDescription("Input Data Staged: " + stageInputFile);
                         registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                
-                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                    }
-                } else if ("URIArray".equals(actualParameter.getType().getType().toString())) {
-                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
-                    List<String> newFiles = new ArrayList<String>();
-                    for (String paramValueEach : split) {
-                        if (index < oldIndex) {
-                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
-                            newFiles.add(oldFiles.get(index));
-                            data.append(oldFiles.get(index++)).append(",");
-                        } else {
-                            String stageInputFiles = stageInputFiles(cluster, jobExecutionContext, paramValueEach);
-                            status.setTransferState(TransferState.UPLOAD);
-                            detail.setTransferStatus(status);
-                            detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
-                            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
-                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
-                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
-                            newFiles.add(stageInputFiles);
-                        }
 
+                        GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
                     }
-                    ((URIArrayType) actualParameter.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
-                }
-                inputNew.getParameters().put(paramName, actualParameter);
+                } // FIXME: what is the thrift model DataType equivalent for URIArray type?
+//                else if ("URIArray".equals(inputParamType.getType().getType().toString())) {
+//                    List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
+//                    List<String> newFiles = new ArrayList<String>();
+//                    for (String paramValueEach : split) {
+//                        if (index < oldIndex) {
+//                            log.info("Input File: " + paramValue + " is already transfered, so we skip this operation !!!");
+//                            newFiles.add(oldFiles.get(index));
+//                            data.append(oldFiles.get(index++)).append(",");
+//                        } else {
+//                            String stageInputFiles = stageInputFiles(cluster, jobExecutionContext, paramValueEach);
+//                            status.setTransferState(TransferState.UPLOAD);
+//                            detail.setTransferStatus(status);
+//                            detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
+//                            registry.add(ChildDataType.DATA_TRANSFER_DETAIL, detail, jobExecutionContext.getTaskData().getTaskID());
+//                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
+//                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
+//                            newFiles.add(stageInputFiles);
+//                        }
+//
+//                    }
+//                    ((URIArrayType) inputParamType.getType()).setValueArray(newFiles.toArray(new String[newFiles.size()]));
+//                }
+                inputNew.getParameters().put(paramName, inputParamType);
             }
         } catch (Exception e) {
 			log.error(e.getMessage());
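
With ActualParameter and MappingFactory gone, the handler walks the Thrift DataObjectType inputs directly and branches on DataType.URI. A minimal sketch of building and updating such an input, assuming the usual Thrift-generated setters (setKey, setValue, setType) alongside the getKey/getType accessors used in the diff; the paths are placeholders, and commit [31/50] later splits this struct into InputDataObjectType and OutputDataObjectType.

import org.apache.airavata.model.workspace.experiment.DataObjectType;
import org.apache.airavata.model.workspace.experiment.DataType;

public class InputParameterSketch {
    public static void main(String[] args) {
        DataObjectType input = new DataObjectType();
        input.setKey("Input_to_Echo");          // parameter name
        input.setValue("/tmp/local-input.txt"); // placeholder local path
        input.setType(DataType.URI);            // URI inputs get staged to the cluster

        if (input.getType() == DataType.URI) {
            // The handler rewrites the value to the staged location after the transfer.
            input.setValue("/scratch/job-0001/local-input.txt"); // placeholder staged path
        }
        System.out.println(input.getKey() + " -> " + input.getValue());
    }
}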


[31/50] [abbrv] airavata git commit: Removed the DataObjectType thrift struct from experimentModel and replaced it with the Input and Output DataObjectType structs defined in the applicationInterfaceModel thrift file

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Workspace/Experiment/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Workspace/Experiment/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Workspace/Experiment/Types.php
index 19a8b37..c6037f7 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Workspace/Experiment/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/Workspace/Experiment/Types.php
@@ -203,21 +203,6 @@ final class CorrectiveAction {
   );
 }
 
-final class DataType {
-  const STRING = 0;
-  const INTEGER = 1;
-  const URI = 2;
-  const STDOUT = 3;
-  const STDERR = 4;
-  static public $__names = array(
-    0 => 'STRING',
-    1 => 'INTEGER',
-    2 => 'URI',
-    3 => 'STDOUT',
-    4 => 'STDERR',
-  );
-}
-
 final class ExecutionUnit {
   const INPUT = 0;
   const APPLICATION = 1;
@@ -783,138 +768,6 @@ class ApplicationStatus {
 
 }
 
-class DataObjectType {
-  static $_TSPEC;
-
-  public $key = null;
-  public $value = null;
-  public $type = null;
-  public $metaData = null;
-
-  public function __construct($vals=null) {
-    if (!isset(self::$_TSPEC)) {
-      self::$_TSPEC = array(
-        1 => array(
-          'var' => 'key',
-          'type' => TType::STRING,
-          ),
-        2 => array(
-          'var' => 'value',
-          'type' => TType::STRING,
-          ),
-        3 => array(
-          'var' => 'type',
-          'type' => TType::I32,
-          ),
-        4 => array(
-          'var' => 'metaData',
-          'type' => TType::STRING,
-          ),
-        );
-    }
-    if (is_array($vals)) {
-      if (isset($vals['key'])) {
-        $this->key = $vals['key'];
-      }
-      if (isset($vals['value'])) {
-        $this->value = $vals['value'];
-      }
-      if (isset($vals['type'])) {
-        $this->type = $vals['type'];
-      }
-      if (isset($vals['metaData'])) {
-        $this->metaData = $vals['metaData'];
-      }
-    }
-  }
-
-  public function getName() {
-    return 'DataObjectType';
-  }
-
-  public function read($input)
-  {
-    $xfer = 0;
-    $fname = null;
-    $ftype = 0;
-    $fid = 0;
-    $xfer += $input->readStructBegin($fname);
-    while (true)
-    {
-      $xfer += $input->readFieldBegin($fname, $ftype, $fid);
-      if ($ftype == TType::STOP) {
-        break;
-      }
-      switch ($fid)
-      {
-        case 1:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->key);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
-        case 2:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->value);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
-        case 3:
-          if ($ftype == TType::I32) {
-            $xfer += $input->readI32($this->type);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
-        case 4:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->metaData);
-          } else {
-            $xfer += $input->skip($ftype);
-          }
-          break;
-        default:
-          $xfer += $input->skip($ftype);
-          break;
-      }
-      $xfer += $input->readFieldEnd();
-    }
-    $xfer += $input->readStructEnd();
-    return $xfer;
-  }
-
-  public function write($output) {
-    $xfer = 0;
-    $xfer += $output->writeStructBegin('DataObjectType');
-    if ($this->key !== null) {
-      $xfer += $output->writeFieldBegin('key', TType::STRING, 1);
-      $xfer += $output->writeString($this->key);
-      $xfer += $output->writeFieldEnd();
-    }
-    if ($this->value !== null) {
-      $xfer += $output->writeFieldBegin('value', TType::STRING, 2);
-      $xfer += $output->writeString($this->value);
-      $xfer += $output->writeFieldEnd();
-    }
-    if ($this->type !== null) {
-      $xfer += $output->writeFieldBegin('type', TType::I32, 3);
-      $xfer += $output->writeI32($this->type);
-      $xfer += $output->writeFieldEnd();
-    }
-    if ($this->metaData !== null) {
-      $xfer += $output->writeFieldBegin('metaData', TType::STRING, 4);
-      $xfer += $output->writeString($this->metaData);
-      $xfer += $output->writeFieldEnd();
-    }
-    $xfer += $output->writeFieldStop();
-    $xfer += $output->writeStructEnd();
-    return $xfer;
-  }
-
-}
-
 class ComputationalResourceScheduling {
   static $_TSPEC;
 
@@ -2427,7 +2280,7 @@ class TaskDetails {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\InputDataObjectType',
             ),
           ),
         7 => array(
@@ -2436,7 +2289,7 @@ class TaskDetails {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType',
             ),
           ),
         8 => array(
@@ -2597,7 +2450,7 @@ class TaskDetails {
             for ($_i18 = 0; $_i18 < $_size14; ++$_i18)
             {
               $elem19 = null;
-              $elem19 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem19 = new \Airavata\Model\AppCatalog\AppInterface\InputDataObjectType();
               $xfer += $elem19->read($input);
               $this->applicationInputs []= $elem19;
             }
@@ -2615,7 +2468,7 @@ class TaskDetails {
             for ($_i24 = 0; $_i24 < $_size20; ++$_i24)
             {
               $elem25 = null;
-              $elem25 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem25 = new \Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType();
               $xfer += $elem25->read($input);
               $this->applicationOutputs []= $elem25;
             }
@@ -2915,7 +2768,7 @@ class WorkflowNodeDetails {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\InputDataObjectType',
             ),
           ),
         7 => array(
@@ -2924,7 +2777,7 @@ class WorkflowNodeDetails {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType',
             ),
           ),
         8 => array(
@@ -3049,7 +2902,7 @@ class WorkflowNodeDetails {
             for ($_i53 = 0; $_i53 < $_size49; ++$_i53)
             {
               $elem54 = null;
-              $elem54 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem54 = new \Airavata\Model\AppCatalog\AppInterface\InputDataObjectType();
               $xfer += $elem54->read($input);
               $this->nodeInputs []= $elem54;
             }
@@ -3067,7 +2920,7 @@ class WorkflowNodeDetails {
             for ($_i59 = 0; $_i59 < $_size55; ++$_i59)
             {
               $elem60 = null;
-              $elem60 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem60 = new \Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType();
               $xfer += $elem60->read($input);
               $this->nodeOutputs []= $elem60;
             }
@@ -3533,7 +3386,7 @@ class Experiment {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\InputDataObjectType',
             ),
           ),
         14 => array(
@@ -3542,7 +3395,7 @@ class Experiment {
           'etype' => TType::STRUCT,
           'elem' => array(
             'type' => TType::STRUCT,
-            'class' => '\Airavata\Model\Workspace\Experiment\DataObjectType',
+            'class' => '\Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType',
             ),
           ),
         15 => array(
@@ -3750,7 +3603,7 @@ class Experiment {
             for ($_i88 = 0; $_i88 < $_size84; ++$_i88)
             {
               $elem89 = null;
-              $elem89 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem89 = new \Airavata\Model\AppCatalog\AppInterface\InputDataObjectType();
               $xfer += $elem89->read($input);
               $this->experimentInputs []= $elem89;
             }
@@ -3768,7 +3621,7 @@ class Experiment {
             for ($_i94 = 0; $_i94 < $_size90; ++$_i94)
             {
               $elem95 = null;
-              $elem95 = new \Airavata\Model\Workspace\Experiment\DataObjectType();
+              $elem95 = new \Airavata\Model\AppCatalog\AppInterface\OutputDataObjectType();
               $xfer += $elem95->read($input);
               $this->experimentOutputs []= $elem95;
             }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index a96cba7..daaf4cc 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -53,7 +53,7 @@ public class CreateLaunchExperiment {
     private static final String DEFAULT_GATEWAY = "default.registry.gateway";
     private static Airavata.Client airavataClient;
 
-    private static String echoAppId = "Echo_b6782be4-315b-4cbd-9403-aa7ce564548a";
+    private static String echoAppId = "Echo_37c26231-8784-4a40-b184-ae00f6330113";
     private static String wrfAppId = "WRF_5f097c9c-7066-49ec-aed7-4e39607b3adc";
     private static String amberAppId = "Amber_89906be6-5678-49a6-9d04-a0604fbdef2e";
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
new file mode 100644
index 0000000..dcd684c
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentForLocalhost.java
@@ -0,0 +1,269 @@
+package org.apache.airavata.client.samples;
+
+import org.apache.airavata.api.Airavata;
+import org.apache.airavata.api.client.AiravataClientFactory;
+import org.apache.airavata.client.tools.RegisterSampleApplicationsUtils;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
+import org.apache.airavata.model.error.AiravataClientConnectException;
+import org.apache.airavata.model.error.AiravataClientException;
+import org.apache.thrift.TException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by shameera on 9/30/14.
+ */
+public class CreateLaunchExperimentForLocalhost {
+
+    private static final String THRIFT_SERVER_HOST = "127.0.0.1";
+    private static final int THRIFT_SERVER_PORT = 8930;
+    private static final String DEFAULT_GATEWAY = "Sample";
+
+    private Airavata.Client airavataClient;
+    private String localhostId;
+    private String echoModuleId;
+    private String addModuleId;
+    private String multiplyModuleId;
+    private String subtractModuleId;
+
+    public static void main(String[] args) throws AiravataClientConnectException, TException {
+        CreateLaunchExperimentForLocalhost worker = new CreateLaunchExperimentForLocalhost();
+        worker.register();
+    }
+
+
+    public void register() throws AiravataClientConnectException, TException {
+        airavataClient = AiravataClientFactory.createAiravataClient(THRIFT_SERVER_HOST, THRIFT_SERVER_PORT);
+
+        registerLocalhost();
+//        registerGatewayProfile();
+        registerApplicationModules();
+        registerApplicationDeployments();
+        registerApplicationInterfaces();
+    }
+
+    private void registerGatewayProfile() throws TException {
+        ComputeResourcePreference localhostResourcePreference = RegisterSampleApplicationsUtils.
+             createComputeResourcePreference("localhost", "test", false, null, null, null,
+                "/Users/shameera/work/source/git_airavata/modules/distribution/server/target/apache-airavata-server-0.14-SNAPSHOT/tmp");
+        GatewayResourceProfile gatewayResourceProfile = new GatewayResourceProfile();
+        gatewayResourceProfile.setGatewayID(DEFAULT_GATEWAY);
+        gatewayResourceProfile.setGatewayName(DEFAULT_GATEWAY);
+        gatewayResourceProfile.addToComputeResourcePreferences(localhostResourcePreference);
+        airavataClient.registerGatewayResourceProfile(gatewayResourceProfile);
+    }
+
+    private void registerLocalhost() {
+//        try {
+//            System.out.println("\n #### Registering Localhost Computational Resource #### \n");
+//
+//            ComputeResourceDescription computeResourceDescription = RegisterSampleApplicationsUtils.
+//                    createComputeResourceDescription("localhost", "LocalHost", null, null);
+//            localhostId = airavataClient.registerComputeResource(computeResourceDescription);
+//            ResourceJobManager resourceJobManager = RegisterSampleApplicationsUtils.
+//                    createResourceJobManager(ResourceJobManagerType.FORK, null, null, null);
+//            LOCALSubmission submission = new LOCALSubmission();
+//            submission.setResourceJobManager(resourceJobManager);
+//            boolean localSubmission = airavataClient.addLocalSubmissionDetails(localhostId, 1, submission);
+//            if (!localSubmission) throw new AiravataClientException();
+//            System.out.println("LocalHost Resource Id is " + localhostId);
+//
+//        } catch (TException e) {
+//            e.printStackTrace();
+//        }
+    }
+
+    private void registerApplicationInterfaces() {
+         registerAddApplicationInterface();
+        registerSubtractApplicationInterface();
+        registerMultiplyApplicationInterface();
+        registerEchoInterface();
+    }
+
+    private void registerApplicationDeployments() throws TException {
+        System.out.println("#### Registering Application Deployments on Localhost #### \n");
+        //Register Echo
+        String echoAppDeployId = airavataClient.registerApplicationDeployment(
+                RegisterSampleApplicationsUtils.createApplicationDeployment(echoModuleId, localhostId,
+                        "/Users/shameera/work/tryout/scripts/echo.sh", ApplicationParallelismType.SERIAL, "Echo application description"));
+        System.out.println("Echo on localhost Id " + echoAppDeployId);
+
+        //Register Add application
+        String addAppDeployId = airavataClient.registerApplicationDeployment(
+                RegisterSampleApplicationsUtils.createApplicationDeployment(addModuleId, localhostId,
+                        "/Users/shameera/work/tryout/scripts/add.sh", ApplicationParallelismType.SERIAL, "Add application description"));
+        System.out.println("Add on localhost Id " + addAppDeployId);
+
+        //Register Multiply application
+        String multiplyAppDeployId = airavataClient.registerApplicationDeployment(
+                RegisterSampleApplicationsUtils.createApplicationDeployment(multiplyModuleId, localhostId,
+                        "/Users/shameera/work/tryout/scripts/multiply.sh", ApplicationParallelismType.SERIAL, "Multiply application description"));
+        System.out.println("Echo on localhost Id " + multiplyAppDeployId);
+
+        //Register Subtract application
+        String subtractAppDeployId = airavataClient.registerApplicationDeployment(
+                RegisterSampleApplicationsUtils.createApplicationDeployment(subtractModuleId, localhostId,
+                        "/Users/shameera/work/tryout/scripts/subtract.sh", ApplicationParallelismType.SERIAL, "Subtract application description "));
+        System.out.println("Echo on localhost Id " + subtractAppDeployId);
+    }
+
+    private void registerApplicationModules() throws TException {
+        //Register Echo
+        echoModuleId = airavataClient.registerApplicationModule(
+                RegisterSampleApplicationsUtils.createApplicationModule(
+                        "Echo", "1.0", "Echo application description"));
+        System.out.println("Echo Module Id " + echoModuleId);
+        //Register Add
+        addModuleId = airavataClient.registerApplicationModule(
+                RegisterSampleApplicationsUtils.createApplicationModule(
+                        "Add", "1.0", "Add application description"));
+        System.out.println("Add Module Id " + addModuleId);
+        //Register Multiply
+        multiplyModuleId = airavataClient.registerApplicationModule(
+                RegisterSampleApplicationsUtils.createApplicationModule(
+                        "Multiply", "1.0", "Multiply application description"));
+        System.out.println("Multiply Module Id " + multiplyModuleId);
+        //Register Subtract
+        subtractModuleId = airavataClient.registerApplicationModule(
+                RegisterSampleApplicationsUtils.createApplicationModule(
+                        "Subtract", "1.0", "Subtract application description"));
+        System.out.println("Subtract Module Id " + subtractModuleId);
+
+    }
+
+
+    public void registerEchoInterface() {
+        try {
+            System.out.println("#### Registering Echo Interface #### \n");
+
+            List<String> appModules = new ArrayList<String>();
+            appModules.add(echoModuleId);
+
+            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("Input_to_Echo", "Hello World",
+                    DataType.STRING, null, false, "A test string to Echo", null);
+
+            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
+            applicationInputs.add(input1);
+
+            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Echoed_Output",
+                    "", DataType.STRING);
+
+            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
+            applicationOutputs.add(output1);
+
+            String echoInterfaceId = airavataClient.registerApplicationInterface(
+                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Echo", "Echo application description",
+                            appModules, applicationInputs, applicationOutputs));
+            System.out.println("Echo Application Interface Id " + echoInterfaceId);
+
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void registerAddApplicationInterface() {
+        try {
+            System.out.println("#### Registering Add Application Interface #### \n");
+
+            List<String> appModules = new ArrayList<String>();
+            appModules.add(addModuleId);
+
+            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "2",
+                    DataType.STRING, null, false, "Add operation input_1", null);
+            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "3",
+                    DataType.STRING, null, false, "Add operation input_2", null);
+
+            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
+            applicationInputs.add(input1);
+            applicationInputs.add(input2);
+
+            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
+                    "0", DataType.STRING);
+
+            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
+            applicationOutputs.add(output1);
+
+            String addApplicationInterfaceId = airavataClient.registerApplicationInterface(
+                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Add", "Add two numbers",
+                            appModules, applicationInputs, applicationOutputs));
+            System.out.println("Add Application Interface Id " + addApplicationInterfaceId);
+
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void registerMultiplyApplicationInterface() {
+        try {
+            System.out.println("#### Registering Multiply Application Interface #### \n");
+
+            List<String> appModules = new ArrayList<String>();
+            appModules.add(multiplyModuleId);
+
+            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "4",
+                    DataType.STRING, null, false, "Multiply operation input_1", null);
+            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "5",
+                    DataType.STRING, null, false, "Multiply operation input_2", null);
+
+            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
+            applicationInputs.add(input1);
+            applicationInputs.add(input2);
+
+            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
+                    "0", DataType.STRING);
+
+            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
+            applicationOutputs.add(output1);
+
+            String multiplyApplicationInterfaceId = airavataClient.registerApplicationInterface(
+                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Multiply", "Multiply two numbers",
+                            appModules, applicationInputs, applicationOutputs));
+            System.out.println("Multiply Application Interface Id " + multiplyApplicationInterfaceId);
+
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public void registerSubtractApplicationInterface() {
+        try {
+            System.out.println("#### Registering Subtract Application Interface #### \n");
+
+            List<String> appModules = new ArrayList<String>();
+            appModules.add(subtractModuleId);
+
+            InputDataObjectType input1 = RegisterSampleApplicationsUtils.createAppInput("x", "6",
+                    DataType.STRING, null, false, "Subtract operation input_1", null);
+            InputDataObjectType input2 = RegisterSampleApplicationsUtils.createAppInput("y", "7",
+                    DataType.STRING, null, false, "Subtract operation input_2", null);
+
+            List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
+            applicationInputs.add(input1);
+            applicationInputs.add(input2);
+
+            OutputDataObjectType output1 = RegisterSampleApplicationsUtils.createAppOutput("Result",
+                    "0", DataType.STRING);
+
+            List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
+            applicationOutputs.add(output1);
+
+            String subtractApplicationInterfaceId = airavataClient.registerApplicationInterface(
+                    RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Subtract", "Subtract two numbers",
+                            appModules, applicationInputs, applicationOutputs));
+            System.out.println("Subtract Application Interface Id " + subtractApplicationInterfaceId);
+
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+    }
+}
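
The class above follows a fixed module -> deployment -> interface registration pattern. The snippet below is a minimal sketch (not part of this commit) of extending it with one more sample application; the "Divide" name, script path, and method name are made up for illustration, and it assumes the same initialized airavataClient, localhostId field, and RegisterSampleApplicationsUtils helpers used above.

    private void registerDivideSample() throws TException {
        // Register the module (name/version/description, as in registerApplicationModules above).
        String divideModuleId = airavataClient.registerApplicationModule(
                RegisterSampleApplicationsUtils.createApplicationModule(
                        "Divide", "1.0", "Divide application description"));

        // Register a localhost deployment pointing at a wrapper script (path is hypothetical).
        String divideAppDeployId = airavataClient.registerApplicationDeployment(
                RegisterSampleApplicationsUtils.createApplicationDeployment(divideModuleId, localhostId,
                        "/path/to/divide.sh", ApplicationParallelismType.SERIAL, "Divide application description"));
        System.out.println("Divide on localhost Id " + divideAppDeployId);

        // Register the interface: two string inputs and one string output, like Add/Subtract.
        List<String> appModules = new ArrayList<String>();
        appModules.add(divideModuleId);

        List<InputDataObjectType> applicationInputs = new ArrayList<InputDataObjectType>();
        applicationInputs.add(RegisterSampleApplicationsUtils.createAppInput("x", "8",
                DataType.STRING, null, false, "Divide operation input_1", null));
        applicationInputs.add(RegisterSampleApplicationsUtils.createAppInput("y", "2",
                DataType.STRING, null, false, "Divide operation input_2", null));

        List<OutputDataObjectType> applicationOutputs = new ArrayList<OutputDataObjectType>();
        applicationOutputs.add(RegisterSampleApplicationsUtils.createAppOutput("Result",
                "0", DataType.STRING));

        String divideInterfaceId = airavataClient.registerApplicationInterface(
                RegisterSampleApplicationsUtils.createApplicationInterfaceDescription("Divide", "Divide two numbers",
                        appModules, applicationInputs, applicationOutputs));
        System.out.println("Divide Application Interface Id " + divideInterfaceId);
    }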

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/messaging/event/TaskOutputChangeEvent.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/messaging/event/TaskOutputChangeEvent.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/messaging/event/TaskOutputChangeEvent.java
index fbbd6c0..a86cfb7 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/messaging/event/TaskOutputChangeEvent.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/messaging/event/TaskOutputChangeEvent.java
@@ -61,7 +61,7 @@ import org.slf4j.LoggerFactory;
     schemes.put(TupleScheme.class, new TaskOutputChangeEventTupleSchemeFactory());
   }
 
-  private List<org.apache.airavata.model.workspace.experiment.DataObjectType> output; // required
+  private List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> output; // required
   private TaskIdentifier taskIdentity; // required
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
@@ -131,7 +131,7 @@ import org.slf4j.LoggerFactory;
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
     tmpMap.put(_Fields.OUTPUT, new org.apache.thrift.meta_data.FieldMetaData("output", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.workspace.experiment.DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType.class))));
     tmpMap.put(_Fields.TASK_IDENTITY, new org.apache.thrift.meta_data.FieldMetaData("taskIdentity", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TaskIdentifier.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
@@ -142,7 +142,7 @@ import org.slf4j.LoggerFactory;
   }
 
   public TaskOutputChangeEvent(
-    List<org.apache.airavata.model.workspace.experiment.DataObjectType> output,
+    List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> output,
     TaskIdentifier taskIdentity)
   {
     this();
@@ -155,9 +155,9 @@ import org.slf4j.LoggerFactory;
    */
   public TaskOutputChangeEvent(TaskOutputChangeEvent other) {
     if (other.isSetOutput()) {
-      List<org.apache.airavata.model.workspace.experiment.DataObjectType> __this__output = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(other.output.size());
-      for (org.apache.airavata.model.workspace.experiment.DataObjectType other_element : other.output) {
-        __this__output.add(new org.apache.airavata.model.workspace.experiment.DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> __this__output = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(other.output.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType other_element : other.output) {
+        __this__output.add(new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType(other_element));
       }
       this.output = __this__output;
     }
@@ -180,22 +180,22 @@ import org.slf4j.LoggerFactory;
     return (this.output == null) ? 0 : this.output.size();
   }
 
-  public java.util.Iterator<org.apache.airavata.model.workspace.experiment.DataObjectType> getOutputIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getOutputIterator() {
     return (this.output == null) ? null : this.output.iterator();
   }
 
-  public void addToOutput(org.apache.airavata.model.workspace.experiment.DataObjectType elem) {
+  public void addToOutput(org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType elem) {
     if (this.output == null) {
-      this.output = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>();
+      this.output = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>();
     }
     this.output.add(elem);
   }
 
-  public List<org.apache.airavata.model.workspace.experiment.DataObjectType> getOutput() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getOutput() {
     return this.output;
   }
 
-  public void setOutput(List<org.apache.airavata.model.workspace.experiment.DataObjectType> output) {
+  public void setOutput(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> output) {
     this.output = output;
   }
 
@@ -243,7 +243,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetOutput();
       } else {
-        setOutput((List<org.apache.airavata.model.workspace.experiment.DataObjectType>)value);
+        setOutput((List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>)value);
       }
       break;
 
@@ -445,11 +445,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
-                struct.output = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(_list0.size);
+                struct.output = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  org.apache.airavata.model.workspace.experiment.DataObjectType _elem2;
-                  _elem2 = new org.apache.airavata.model.workspace.experiment.DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem2;
+                  _elem2 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
                   _elem2.read(iprot);
                   struct.output.add(_elem2);
                 }
@@ -486,7 +486,7 @@ import org.slf4j.LoggerFactory;
         oprot.writeFieldBegin(OUTPUT_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.output.size()));
-          for (org.apache.airavata.model.workspace.experiment.DataObjectType _iter3 : struct.output)
+          for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter3 : struct.output)
           {
             _iter3.write(oprot);
           }
@@ -518,7 +518,7 @@ import org.slf4j.LoggerFactory;
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.output.size());
-        for (org.apache.airavata.model.workspace.experiment.DataObjectType _iter4 : struct.output)
+        for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter4 : struct.output)
         {
           _iter4.write(oprot);
         }
@@ -531,11 +531,11 @@ import org.slf4j.LoggerFactory;
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
         org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-        struct.output = new ArrayList<org.apache.airavata.model.workspace.experiment.DataObjectType>(_list5.size);
+        struct.output = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list5.size);
         for (int _i6 = 0; _i6 < _list5.size; ++_i6)
         {
-          org.apache.airavata.model.workspace.experiment.DataObjectType _elem7;
-          _elem7 = new org.apache.airavata.model.workspace.experiment.DataObjectType();
+          org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem7;
+          _elem7 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
           _elem7.read(iprot);
           struct.output.add(_elem7);
         }
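
The hunks above retype TaskOutputChangeEvent.output from the old workspace.experiment.DataObjectType to the appinterface OutputDataObjectType. Below is a minimal publisher-side sketch (not from this commit); it uses only the two-argument constructor visible in the diff, assumes the usual generated setName/setValue/setType setters on the Thrift bean, and takes the TaskIdentifier as a parameter rather than guessing its fields.

    static TaskOutputChangeEvent buildTaskOutputChangeEvent(TaskIdentifier taskIdentity) {
        // Build one output record with the new appinterface type (values are illustrative).
        org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType echoed =
                new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
        echoed.setName("Echoed_Output");
        echoed.setValue("Hello World");
        echoed.setType(org.apache.airavata.model.appcatalog.appinterface.DataType.STRING);

        List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> outputs =
                new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(1);
        outputs.add(echoed);

        // Constructor signature as shown in the diff above: (output list, task identity).
        return new TaskOutputChangeEvent(outputs, taskIdentity);
    }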

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/Experiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/Experiment.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/Experiment.java
index 7b674cd..7f974cd 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/Experiment.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/Experiment.java
@@ -105,8 +105,8 @@ import org.slf4j.LoggerFactory;
   private String workflowTemplateVersion; // optional
   private UserConfigurationData userConfigurationData; // optional
   private String workflowExecutionInstanceId; // optional
-  private List<DataObjectType> experimentInputs; // optional
-  private List<DataObjectType> experimentOutputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> experimentInputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> experimentOutputs; // optional
   private ExperimentStatus experimentStatus; // optional
   private List<WorkflowNodeStatus> stateChangeList; // optional
   private List<WorkflowNodeDetails> workflowNodeDetailsList; // optional
@@ -254,10 +254,10 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.EXPERIMENT_INPUTS, new org.apache.thrift.meta_data.FieldMetaData("experimentInputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType.class))));
     tmpMap.put(_Fields.EXPERIMENT_OUTPUTS, new org.apache.thrift.meta_data.FieldMetaData("experimentOutputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType.class))));
     tmpMap.put(_Fields.EXPERIMENT_STATUS, new org.apache.thrift.meta_data.FieldMetaData("experimentStatus", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ExperimentStatus.class)));
     tmpMap.put(_Fields.STATE_CHANGE_LIST, new org.apache.thrift.meta_data.FieldMetaData("stateChangeList", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
@@ -333,16 +333,16 @@ import org.slf4j.LoggerFactory;
       this.workflowExecutionInstanceId = other.workflowExecutionInstanceId;
     }
     if (other.isSetExperimentInputs()) {
-      List<DataObjectType> __this__experimentInputs = new ArrayList<DataObjectType>(other.experimentInputs.size());
-      for (DataObjectType other_element : other.experimentInputs) {
-        __this__experimentInputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> __this__experimentInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(other.experimentInputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType other_element : other.experimentInputs) {
+        __this__experimentInputs.add(new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType(other_element));
       }
       this.experimentInputs = __this__experimentInputs;
     }
     if (other.isSetExperimentOutputs()) {
-      List<DataObjectType> __this__experimentOutputs = new ArrayList<DataObjectType>(other.experimentOutputs.size());
-      for (DataObjectType other_element : other.experimentOutputs) {
-        __this__experimentOutputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> __this__experimentOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(other.experimentOutputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType other_element : other.experimentOutputs) {
+        __this__experimentOutputs.add(new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType(other_element));
       }
       this.experimentOutputs = __this__experimentOutputs;
     }
@@ -680,22 +680,22 @@ import org.slf4j.LoggerFactory;
     return (this.experimentInputs == null) ? 0 : this.experimentInputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getExperimentInputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getExperimentInputsIterator() {
     return (this.experimentInputs == null) ? null : this.experimentInputs.iterator();
   }
 
-  public void addToExperimentInputs(DataObjectType elem) {
+  public void addToExperimentInputs(org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType elem) {
     if (this.experimentInputs == null) {
-      this.experimentInputs = new ArrayList<DataObjectType>();
+      this.experimentInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>();
     }
     this.experimentInputs.add(elem);
   }
 
-  public List<DataObjectType> getExperimentInputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getExperimentInputs() {
     return this.experimentInputs;
   }
 
-  public void setExperimentInputs(List<DataObjectType> experimentInputs) {
+  public void setExperimentInputs(List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> experimentInputs) {
     this.experimentInputs = experimentInputs;
   }
 
@@ -718,22 +718,22 @@ import org.slf4j.LoggerFactory;
     return (this.experimentOutputs == null) ? 0 : this.experimentOutputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getExperimentOutputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getExperimentOutputsIterator() {
     return (this.experimentOutputs == null) ? null : this.experimentOutputs.iterator();
   }
 
-  public void addToExperimentOutputs(DataObjectType elem) {
+  public void addToExperimentOutputs(org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType elem) {
     if (this.experimentOutputs == null) {
-      this.experimentOutputs = new ArrayList<DataObjectType>();
+      this.experimentOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>();
     }
     this.experimentOutputs.add(elem);
   }
 
-  public List<DataObjectType> getExperimentOutputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getExperimentOutputs() {
     return this.experimentOutputs;
   }
 
-  public void setExperimentOutputs(List<DataObjectType> experimentOutputs) {
+  public void setExperimentOutputs(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> experimentOutputs) {
     this.experimentOutputs = experimentOutputs;
   }
 
@@ -991,7 +991,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetExperimentInputs();
       } else {
-        setExperimentInputs((List<DataObjectType>)value);
+        setExperimentInputs((List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>)value);
       }
       break;
 
@@ -999,7 +999,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetExperimentOutputs();
       } else {
-        setExperimentOutputs((List<DataObjectType>)value);
+        setExperimentOutputs((List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>)value);
       }
       break;
 
@@ -1871,11 +1871,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list96 = iprot.readListBegin();
-                struct.experimentInputs = new ArrayList<DataObjectType>(_list96.size);
+                struct.experimentInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list96.size);
                 for (int _i97 = 0; _i97 < _list96.size; ++_i97)
                 {
-                  DataObjectType _elem98;
-                  _elem98 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem98;
+                  _elem98 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
                   _elem98.read(iprot);
                   struct.experimentInputs.add(_elem98);
                 }
@@ -1890,11 +1890,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list99 = iprot.readListBegin();
-                struct.experimentOutputs = new ArrayList<DataObjectType>(_list99.size);
+                struct.experimentOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list99.size);
                 for (int _i100 = 0; _i100 < _list99.size; ++_i100)
                 {
-                  DataObjectType _elem101;
-                  _elem101 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem101;
+                  _elem101 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
                   _elem101.read(iprot);
                   struct.experimentOutputs.add(_elem101);
                 }
@@ -2063,7 +2063,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(EXPERIMENT_INPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.experimentInputs.size()));
-            for (DataObjectType _iter111 : struct.experimentInputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter111 : struct.experimentInputs)
             {
               _iter111.write(oprot);
             }
@@ -2077,7 +2077,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(EXPERIMENT_OUTPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.experimentOutputs.size()));
-            for (DataObjectType _iter112 : struct.experimentOutputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter112 : struct.experimentOutputs)
             {
               _iter112.write(oprot);
             }
@@ -2227,7 +2227,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetExperimentInputs()) {
         {
           oprot.writeI32(struct.experimentInputs.size());
-          for (DataObjectType _iter116 : struct.experimentInputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter116 : struct.experimentInputs)
           {
             _iter116.write(oprot);
           }
@@ -2236,7 +2236,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetExperimentOutputs()) {
         {
           oprot.writeI32(struct.experimentOutputs.size());
-          for (DataObjectType _iter117 : struct.experimentOutputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter117 : struct.experimentOutputs)
           {
             _iter117.write(oprot);
           }
@@ -2322,11 +2322,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(8)) {
         {
           org.apache.thrift.protocol.TList _list121 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.experimentInputs = new ArrayList<DataObjectType>(_list121.size);
+          struct.experimentInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list121.size);
           for (int _i122 = 0; _i122 < _list121.size; ++_i122)
           {
-            DataObjectType _elem123;
-            _elem123 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem123;
+            _elem123 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
             _elem123.read(iprot);
             struct.experimentInputs.add(_elem123);
           }
@@ -2336,11 +2336,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(9)) {
         {
           org.apache.thrift.protocol.TList _list124 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.experimentOutputs = new ArrayList<DataObjectType>(_list124.size);
+          struct.experimentOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list124.size);
           for (int _i125 = 0; _i125 < _list124.size; ++_i125)
           {
-            DataObjectType _elem126;
-            _elem126 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem126;
+            _elem126 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
             _elem126.read(iprot);
             struct.experimentOutputs.add(_elem126);
           }
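
With the Experiment change above (and the matching TaskDetails and WorkflowNodeDetails changes further down), experiment inputs and outputs carry the same appinterface types that the sample registration class produces, so no conversion layer is needed. The following is a minimal sketch (not part of this commit): it reuses the RegisterSampleApplicationsUtils helpers from the sample class, calls only the addTo* methods visible in this diff, and assumes a constructed Experiment bean plus imports from org.apache.airavata.model.appcatalog.appinterface.

    static void attachAddSampleInputsAndOutputs(Experiment experiment) {
        // Inputs for the "Add" sample: two string values, built with the helper used above.
        experiment.addToExperimentInputs(RegisterSampleApplicationsUtils.createAppInput("x", "2",
                DataType.STRING, null, false, "Add operation input_1", null));
        experiment.addToExperimentInputs(RegisterSampleApplicationsUtils.createAppInput("y", "3",
                DataType.STRING, null, false, "Add operation input_2", null));

        // Declared output, now an appinterface OutputDataObjectType rather than DataObjectType.
        experiment.addToExperimentOutputs(RegisterSampleApplicationsUtils.createAppOutput("Result",
                "0", DataType.STRING));
    }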

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
index c1034a0..d1cbe5e 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/JobDetails.java
@@ -271,14 +271,9 @@ import org.slf4j.LoggerFactory;
     }
   }
 
-    /**
-     * this method is deprecated after we introduce new thirft model with appcatalog
-     * @return
-     */
-    @Deprecated
-    public String getJobDescription() {
-        return this.jobDescription;
-    }
+  public String getJobDescription() {
+    return this.jobDescription;
+  }
 
   public void setJobDescription(String jobDescription) {
     this.jobDescription = jobDescription;

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/TaskDetails.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/TaskDetails.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/TaskDetails.java
index aabc989..2312b1a 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/TaskDetails.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/TaskDetails.java
@@ -84,8 +84,8 @@ import org.slf4j.LoggerFactory;
   private String applicationId; // optional
   private String applicationVersion; // optional
   private String applicationDeploymentId; // optional
-  private List<DataObjectType> applicationInputs; // optional
-  private List<DataObjectType> applicationOutputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> applicationInputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> applicationOutputs; // optional
   private ComputationalResourceScheduling taskScheduling; // optional
   private AdvancedInputDataHandling advancedInputDataHandling; // optional
   private AdvancedOutputDataHandling advancedOutputDataHandling; // optional
@@ -210,10 +210,10 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.APPLICATION_INPUTS, new org.apache.thrift.meta_data.FieldMetaData("applicationInputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType.class))));
     tmpMap.put(_Fields.APPLICATION_OUTPUTS, new org.apache.thrift.meta_data.FieldMetaData("applicationOutputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType.class))));
     tmpMap.put(_Fields.TASK_SCHEDULING, new org.apache.thrift.meta_data.FieldMetaData("taskScheduling", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ComputationalResourceScheduling.class)));
     tmpMap.put(_Fields.ADVANCED_INPUT_DATA_HANDLING, new org.apache.thrift.meta_data.FieldMetaData("advancedInputDataHandling", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
@@ -266,16 +266,16 @@ import org.slf4j.LoggerFactory;
       this.applicationDeploymentId = other.applicationDeploymentId;
     }
     if (other.isSetApplicationInputs()) {
-      List<DataObjectType> __this__applicationInputs = new ArrayList<DataObjectType>(other.applicationInputs.size());
-      for (DataObjectType other_element : other.applicationInputs) {
-        __this__applicationInputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> __this__applicationInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(other.applicationInputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType other_element : other.applicationInputs) {
+        __this__applicationInputs.add(new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType(other_element));
       }
       this.applicationInputs = __this__applicationInputs;
     }
     if (other.isSetApplicationOutputs()) {
-      List<DataObjectType> __this__applicationOutputs = new ArrayList<DataObjectType>(other.applicationOutputs.size());
-      for (DataObjectType other_element : other.applicationOutputs) {
-        __this__applicationOutputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> __this__applicationOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(other.applicationOutputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType other_element : other.applicationOutputs) {
+        __this__applicationOutputs.add(new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType(other_element));
       }
       this.applicationOutputs = __this__applicationOutputs;
     }
@@ -456,22 +456,22 @@ import org.slf4j.LoggerFactory;
     return (this.applicationInputs == null) ? 0 : this.applicationInputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getApplicationInputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getApplicationInputsIterator() {
     return (this.applicationInputs == null) ? null : this.applicationInputs.iterator();
   }
 
-  public void addToApplicationInputs(DataObjectType elem) {
+  public void addToApplicationInputs(org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType elem) {
     if (this.applicationInputs == null) {
-      this.applicationInputs = new ArrayList<DataObjectType>();
+      this.applicationInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>();
     }
     this.applicationInputs.add(elem);
   }
 
-  public List<DataObjectType> getApplicationInputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getApplicationInputs() {
     return this.applicationInputs;
   }
 
-  public void setApplicationInputs(List<DataObjectType> applicationInputs) {
+  public void setApplicationInputs(List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> applicationInputs) {
     this.applicationInputs = applicationInputs;
   }
 
@@ -494,22 +494,22 @@ import org.slf4j.LoggerFactory;
     return (this.applicationOutputs == null) ? 0 : this.applicationOutputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getApplicationOutputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getApplicationOutputsIterator() {
     return (this.applicationOutputs == null) ? null : this.applicationOutputs.iterator();
   }
 
-  public void addToApplicationOutputs(DataObjectType elem) {
+  public void addToApplicationOutputs(org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType elem) {
     if (this.applicationOutputs == null) {
-      this.applicationOutputs = new ArrayList<DataObjectType>();
+      this.applicationOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>();
     }
     this.applicationOutputs.add(elem);
   }
 
-  public List<DataObjectType> getApplicationOutputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getApplicationOutputs() {
     return this.applicationOutputs;
   }
 
-  public void setApplicationOutputs(List<DataObjectType> applicationOutputs) {
+  public void setApplicationOutputs(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> applicationOutputs) {
     this.applicationOutputs = applicationOutputs;
   }
 
@@ -780,7 +780,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetApplicationInputs();
       } else {
-        setApplicationInputs((List<DataObjectType>)value);
+        setApplicationInputs((List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>)value);
       }
       break;
 
@@ -788,7 +788,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetApplicationOutputs();
       } else {
-        setApplicationOutputs((List<DataObjectType>)value);
+        setApplicationOutputs((List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>)value);
       }
       break;
 
@@ -1491,11 +1491,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list16 = iprot.readListBegin();
-                struct.applicationInputs = new ArrayList<DataObjectType>(_list16.size);
+                struct.applicationInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list16.size);
                 for (int _i17 = 0; _i17 < _list16.size; ++_i17)
                 {
-                  DataObjectType _elem18;
-                  _elem18 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem18;
+                  _elem18 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
                   _elem18.read(iprot);
                   struct.applicationInputs.add(_elem18);
                 }
@@ -1510,11 +1510,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list19 = iprot.readListBegin();
-                struct.applicationOutputs = new ArrayList<DataObjectType>(_list19.size);
+                struct.applicationOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list19.size);
                 for (int _i20 = 0; _i20 < _list19.size; ++_i20)
                 {
-                  DataObjectType _elem21;
-                  _elem21 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem21;
+                  _elem21 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
                   _elem21.read(iprot);
                   struct.applicationOutputs.add(_elem21);
                 }
@@ -1667,7 +1667,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(APPLICATION_INPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.applicationInputs.size()));
-            for (DataObjectType _iter31 : struct.applicationInputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter31 : struct.applicationInputs)
             {
               _iter31.write(oprot);
             }
@@ -1681,7 +1681,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(APPLICATION_OUTPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.applicationOutputs.size()));
-            for (DataObjectType _iter32 : struct.applicationOutputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter32 : struct.applicationOutputs)
             {
               _iter32.write(oprot);
             }
@@ -1834,7 +1834,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetApplicationInputs()) {
         {
           oprot.writeI32(struct.applicationInputs.size());
-          for (DataObjectType _iter36 : struct.applicationInputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter36 : struct.applicationInputs)
           {
             _iter36.write(oprot);
           }
@@ -1843,7 +1843,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetApplicationOutputs()) {
         {
           oprot.writeI32(struct.applicationOutputs.size());
-          for (DataObjectType _iter37 : struct.applicationOutputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter37 : struct.applicationOutputs)
           {
             _iter37.write(oprot);
           }
@@ -1915,11 +1915,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(4)) {
         {
           org.apache.thrift.protocol.TList _list41 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.applicationInputs = new ArrayList<DataObjectType>(_list41.size);
+          struct.applicationInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list41.size);
           for (int _i42 = 0; _i42 < _list41.size; ++_i42)
           {
-            DataObjectType _elem43;
-            _elem43 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem43;
+            _elem43 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
             _elem43.read(iprot);
             struct.applicationInputs.add(_elem43);
           }
@@ -1929,11 +1929,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(5)) {
         {
           org.apache.thrift.protocol.TList _list44 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.applicationOutputs = new ArrayList<DataObjectType>(_list44.size);
+          struct.applicationOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list44.size);
           for (int _i45 = 0; _i45 < _list44.size; ++_i45)
           {
-            DataObjectType _elem46;
-            _elem46 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem46;
+            _elem46 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
             _elem46.read(iprot);
             struct.applicationOutputs.add(_elem46);
           }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/WorkflowNodeDetails.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/WorkflowNodeDetails.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/WorkflowNodeDetails.java
index 62e5a7e..bab25cd 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/WorkflowNodeDetails.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/WorkflowNodeDetails.java
@@ -78,8 +78,8 @@ import org.slf4j.LoggerFactory;
   private String nodeName; // required
   private ExecutionUnit executionUnit; // required
   private String executionUnitData; // optional
-  private List<DataObjectType> nodeInputs; // optional
-  private List<DataObjectType> nodeOutputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> nodeInputs; // optional
+  private List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> nodeOutputs; // optional
   private WorkflowNodeStatus workflowNodeStatus; // optional
   private List<TaskDetails> taskDetailsList; // optional
   private List<ErrorDetails> errors; // optional
@@ -192,10 +192,10 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.NODE_INPUTS, new org.apache.thrift.meta_data.FieldMetaData("nodeInputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType.class))));
     tmpMap.put(_Fields.NODE_OUTPUTS, new org.apache.thrift.meta_data.FieldMetaData("nodeOutputs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DataObjectType.class))));
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType.class))));
     tmpMap.put(_Fields.WORKFLOW_NODE_STATUS, new org.apache.thrift.meta_data.FieldMetaData("workflowNodeStatus", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WorkflowNodeStatus.class)));
     tmpMap.put(_Fields.TASK_DETAILS_LIST, new org.apache.thrift.meta_data.FieldMetaData("taskDetailsList", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
@@ -247,16 +247,16 @@ import org.slf4j.LoggerFactory;
       this.executionUnitData = other.executionUnitData;
     }
     if (other.isSetNodeInputs()) {
-      List<DataObjectType> __this__nodeInputs = new ArrayList<DataObjectType>(other.nodeInputs.size());
-      for (DataObjectType other_element : other.nodeInputs) {
-        __this__nodeInputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> __this__nodeInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(other.nodeInputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType other_element : other.nodeInputs) {
+        __this__nodeInputs.add(new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType(other_element));
       }
       this.nodeInputs = __this__nodeInputs;
     }
     if (other.isSetNodeOutputs()) {
-      List<DataObjectType> __this__nodeOutputs = new ArrayList<DataObjectType>(other.nodeOutputs.size());
-      for (DataObjectType other_element : other.nodeOutputs) {
-        __this__nodeOutputs.add(new DataObjectType(other_element));
+      List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> __this__nodeOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(other.nodeOutputs.size());
+      for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType other_element : other.nodeOutputs) {
+        __this__nodeOutputs.add(new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType(other_element));
       }
       this.nodeOutputs = __this__nodeOutputs;
     }
@@ -427,22 +427,22 @@ import org.slf4j.LoggerFactory;
     return (this.nodeInputs == null) ? 0 : this.nodeInputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getNodeInputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getNodeInputsIterator() {
     return (this.nodeInputs == null) ? null : this.nodeInputs.iterator();
   }
 
-  public void addToNodeInputs(DataObjectType elem) {
+  public void addToNodeInputs(org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType elem) {
     if (this.nodeInputs == null) {
-      this.nodeInputs = new ArrayList<DataObjectType>();
+      this.nodeInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>();
     }
     this.nodeInputs.add(elem);
   }
 
-  public List<DataObjectType> getNodeInputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> getNodeInputs() {
     return this.nodeInputs;
   }
 
-  public void setNodeInputs(List<DataObjectType> nodeInputs) {
+  public void setNodeInputs(List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType> nodeInputs) {
     this.nodeInputs = nodeInputs;
   }
 
@@ -465,22 +465,22 @@ import org.slf4j.LoggerFactory;
     return (this.nodeOutputs == null) ? 0 : this.nodeOutputs.size();
   }
 
-  public java.util.Iterator<DataObjectType> getNodeOutputsIterator() {
+  public java.util.Iterator<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getNodeOutputsIterator() {
     return (this.nodeOutputs == null) ? null : this.nodeOutputs.iterator();
   }
 
-  public void addToNodeOutputs(DataObjectType elem) {
+  public void addToNodeOutputs(org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType elem) {
     if (this.nodeOutputs == null) {
-      this.nodeOutputs = new ArrayList<DataObjectType>();
+      this.nodeOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>();
     }
     this.nodeOutputs.add(elem);
   }
 
-  public List<DataObjectType> getNodeOutputs() {
+  public List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> getNodeOutputs() {
     return this.nodeOutputs;
   }
 
-  public void setNodeOutputs(List<DataObjectType> nodeOutputs) {
+  public void setNodeOutputs(List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType> nodeOutputs) {
     this.nodeOutputs = nodeOutputs;
   }
 
@@ -644,7 +644,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetNodeInputs();
       } else {
-        setNodeInputs((List<DataObjectType>)value);
+        setNodeInputs((List<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>)value);
       }
       break;
 
@@ -652,7 +652,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetNodeOutputs();
       } else {
-        setNodeOutputs((List<DataObjectType>)value);
+        setNodeOutputs((List<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>)value);
       }
       break;
 
@@ -1182,11 +1182,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list56 = iprot.readListBegin();
-                struct.nodeInputs = new ArrayList<DataObjectType>(_list56.size);
+                struct.nodeInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list56.size);
                 for (int _i57 = 0; _i57 < _list56.size; ++_i57)
                 {
-                  DataObjectType _elem58;
-                  _elem58 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem58;
+                  _elem58 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
                   _elem58.read(iprot);
                   struct.nodeInputs.add(_elem58);
                 }
@@ -1201,11 +1201,11 @@ import org.slf4j.LoggerFactory;
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
                 org.apache.thrift.protocol.TList _list59 = iprot.readListBegin();
-                struct.nodeOutputs = new ArrayList<DataObjectType>(_list59.size);
+                struct.nodeOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list59.size);
                 for (int _i60 = 0; _i60 < _list59.size; ++_i60)
                 {
-                  DataObjectType _elem61;
-                  _elem61 = new DataObjectType();
+                  org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem61;
+                  _elem61 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
                   _elem61.read(iprot);
                   struct.nodeOutputs.add(_elem61);
                 }
@@ -1308,7 +1308,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(NODE_INPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.nodeInputs.size()));
-            for (DataObjectType _iter68 : struct.nodeInputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter68 : struct.nodeInputs)
             {
               _iter68.write(oprot);
             }
@@ -1322,7 +1322,7 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldBegin(NODE_OUTPUTS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.nodeOutputs.size()));
-            for (DataObjectType _iter69 : struct.nodeOutputs)
+            for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter69 : struct.nodeOutputs)
             {
               _iter69.write(oprot);
             }
@@ -1418,7 +1418,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetNodeInputs()) {
         {
           oprot.writeI32(struct.nodeInputs.size());
-          for (DataObjectType _iter72 : struct.nodeInputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _iter72 : struct.nodeInputs)
           {
             _iter72.write(oprot);
           }
@@ -1427,7 +1427,7 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetNodeOutputs()) {
         {
           oprot.writeI32(struct.nodeOutputs.size());
-          for (DataObjectType _iter73 : struct.nodeOutputs)
+          for (org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _iter73 : struct.nodeOutputs)
           {
             _iter73.write(oprot);
           }
@@ -1477,11 +1477,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(2)) {
         {
           org.apache.thrift.protocol.TList _list76 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.nodeInputs = new ArrayList<DataObjectType>(_list76.size);
+          struct.nodeInputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType>(_list76.size);
           for (int _i77 = 0; _i77 < _list76.size; ++_i77)
           {
-            DataObjectType _elem78;
-            _elem78 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType _elem78;
+            _elem78 = new org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType();
             _elem78.read(iprot);
             struct.nodeInputs.add(_elem78);
           }
@@ -1491,11 +1491,11 @@ import org.slf4j.LoggerFactory;
       if (incoming.get(3)) {
         {
           org.apache.thrift.protocol.TList _list79 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.nodeOutputs = new ArrayList<DataObjectType>(_list79.size);
+          struct.nodeOutputs = new ArrayList<org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType>(_list79.size);
           for (int _i80 = 0; _i80 < _list79.size; ++_i80)
           {
-            DataObjectType _elem81;
-            _elem81 = new DataObjectType();
+            org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType _elem81;
+            _elem81 = new org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType();
             _elem81.read(iprot);
             struct.nodeOutputs.add(_elem81);
           }

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/thrift-interface-descriptions/airavataAPI.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/airavataAPI.thrift b/airavata-api/thrift-interface-descriptions/airavataAPI.thrift
index 59b3e7b..2598082 100644
--- a/airavata-api/thrift-interface-descriptions/airavataAPI.thrift
+++ b/airavata-api/thrift-interface-descriptions/airavataAPI.thrift
@@ -397,7 +397,7 @@ service Airavata {
               3: airavataErrors.AiravataClientException ace,
               4: airavataErrors.AiravataSystemException ase)
 
-  list<experimentModel.DataObjectType> getExperimentOutputs (1: required string airavataExperimentId)
+  list<applicationInterfaceModel.OutputDataObjectType> getExperimentOutputs (1: required string airavataExperimentId)
       throws (1: airavataErrors.InvalidRequestException ire,
               2: airavataErrors.ExperimentNotFoundException enf,
               3: airavataErrors.AiravataClientException ace,

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/thrift-interface-descriptions/experimentModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/experimentModel.thrift b/airavata-api/thrift-interface-descriptions/experimentModel.thrift
index 61ba12b..512cfe8 100644
--- a/airavata-api/thrift-interface-descriptions/experimentModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/experimentModel.thrift
@@ -19,6 +19,7 @@
  */
 
 include "computeResourceModel.thrift"
+include "applicationInterfaceModel.thrift"
 
 namespace java org.apache.airavata.model.workspace.experiment
 namespace php Airavata.Model.Workspace.Experiment
@@ -183,25 +184,6 @@ enum CorrectiveAction {
     CANNOT_BE_DETERMINED
 }
 
-enum DataType{
-	STRING,
-	INTEGER,
-	URI,
-	STDOUT,
-	STDERR
-}
-
-/**
-* A structure  hold experiment input output
-*
-*/
-struct DataObjectType {
-    1: required string key,
-    2: optional string value,
-    3: optional DataType type,
-    4: optional string metaData
-}
-
 /**
  * A structure holding the Computational Resource Scheduling.
  *
@@ -307,8 +289,8 @@ struct TaskDetails {
     3: optional string applicationId,
     4: optional string applicationVersion,
     5: optional string applicationDeploymentId,
-    6: optional list<DataObjectType> applicationInputs,
-    7: optional list<DataObjectType> applicationOutputs,
+    6: optional list<applicationInterfaceModel.InputDataObjectType> applicationInputs,
+    7: optional list<applicationInterfaceModel.OutputDataObjectType> applicationOutputs,
     8: optional ComputationalResourceScheduling taskScheduling,
     9: optional AdvancedInputDataHandling advancedInputDataHandling,
     10: optional AdvancedOutputDataHandling advancedOutputDataHandling,
@@ -336,8 +318,8 @@ struct WorkflowNodeDetails {
     3: required string nodeName = SINGLE_APP_NODE_NAME,
     4: required ExecutionUnit executionUnit = ExecutionUnit.APPLICATION,
     5: optional string executionUnitData,
-    6: optional list<DataObjectType> nodeInputs,
-    7: optional list<DataObjectType> nodeOutputs,
+    6: optional list<applicationInterfaceModel.InputDataObjectType> nodeInputs,
+    7: optional list<applicationInterfaceModel.OutputDataObjectType> nodeOutputs,
     8: optional WorkflowNodeStatus workflowNodeStatus,
     9: optional list<TaskDetails> taskDetailsList,
     10: optional list<ErrorDetails> errors
@@ -390,8 +372,8 @@ struct Experiment {
     10: optional string workflowTemplateVersion,
     11: optional UserConfigurationData userConfigurationData,
     12: optional string workflowExecutionInstanceId,
-    13: optional list<DataObjectType> experimentInputs,
-    14: optional list<DataObjectType> experimentOutputs,
+    13: optional list<applicationInterfaceModel.InputDataObjectType> experimentInputs,
+    14: optional list<applicationInterfaceModel.OutputDataObjectType> experimentOutputs,
     15: optional ExperimentStatus experimentStatus,
     16: optional list<WorkflowNodeStatus> stateChangeList,
     17: optional list<WorkflowNodeDetails> workflowNodeDetailsList,

http://git-wip-us.apache.org/repos/asf/airavata/blob/198de990/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/messagingEvents.thrift b/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
index d7178cc..c9f3808 100644
--- a/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
+++ b/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
@@ -19,6 +19,7 @@
  */
 
 include "experimentModel.thrift"
+include "applicationInterfaceModel.thrift"
 
 namespace java org.apache.airavata.model.messaging.event
 namespace php Airavata.Model.Messaging.Event
@@ -75,7 +76,7 @@ struct TaskStatusChangeRequestEvent {
 }
 
 struct TaskOutputChangeEvent {
-    1: required list<experimentModel.DataObjectType> output;
+    1: required list<applicationInterfaceModel.OutputDataObjectType> output;
     2: required TaskIdentifier taskIdentity;
 }
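
For illustration, a minimal Java sketch of populating a WorkflowNodeDetails with the new applicationInterfaceModel types in place of the removed experimentModel.DataObjectType. Only setNodeInputs/setNodeOutputs are taken directly from the diff above; the setName/setValue/setType setters and the DataType.STRING constant are assumed from the generated beans, so treat this as a sketch rather than repository code.

    import java.util.Arrays;

    import org.apache.airavata.model.appcatalog.appinterface.DataType;
    import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
    import org.apache.airavata.model.workspace.experiment.WorkflowNodeDetails;

    public class NodeIoSketch {
        public static void main(String[] args) {
            // Inputs and outputs now use the applicationInterfaceModel types.
            InputDataObjectType in = new InputDataObjectType();
            in.setName("genome_input1");      // replaces DataObjectType.key
            in.setValue("ACGTACGT");
            in.setType(DataType.STRING);

            OutputDataObjectType out = new OutputDataObjectType();
            out.setName("genome_output");
            out.setType(DataType.STRING);

            WorkflowNodeDetails node = new WorkflowNodeDetails();
            node.setNodeInputs(Arrays.asList(in));
            node.setNodeOutputs(Arrays.asList(out));
        }
    }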
 


[09/50] [abbrv] airavata git commit: adding EC2 provider changes

Posted by ch...@apache.org.
adding EC2 provider changes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/e9ee22b9
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/e9ee22b9
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/e9ee22b9

Branch: refs/heads/master
Commit: e9ee22b97d3c3d2a9f409ec1f8b99957792d6d68
Parents: 83ecde9
Author: chathuriw <ka...@gmail.com>
Authored: Wed Nov 5 10:14:50 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 10:14:50 2014 -0500

----------------------------------------------------------------------
 .../airavata/gfac/core/utils/GFacUtils.java     |  15 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |  46 ++-
 .../airavata/gfac/ec2/EC2ProviderTest.java      | 366 ++++++++++---------
 3 files changed, 232 insertions(+), 195 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/e9ee22b9/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index 1cb1250..be0d756 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -39,10 +39,7 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
-import org.apache.airavata.model.appcatalog.computeresource.GlobusJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
@@ -1288,5 +1285,15 @@ public class GFacUtils {
         }
     }
 
+    public static CloudJobSubmission getCloudJobSubmission (String submissionId) throws AppCatalogException{
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            return appCatalog.getComputeResource().getCloudJobSubmission(submissionId);
+        }catch (Exception e){
+            String errorMsg = "Error while retrieving cloud job submission with submission id : " + submissionId;
+            log.error(errorMsg, e);
+            throw new AppCatalogException(errorMsg, e);
+        }
+    }
 
 }
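
For illustration, a minimal caller-side sketch of the new GFacUtils.getCloudJobSubmission helper. All names are taken from this commit (the EC2Provider change below resolves the preferred submission interface the same way); this is a sketch, not code from the repository.

    import org.airavata.appcatalog.cpi.AppCatalogException;
    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gfac.core.utils.GFacUtils;
    import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;

    public class CloudSubmissionLookupSketch {
        // Resolves the job's preferred submission interface and fetches its
        // cloud-specific settings through the helper added in this commit.
        public static CloudJobSubmission lookup(JobExecutionContext context) throws AppCatalogException {
            JobSubmissionInterface iface = context.getPreferredJobSubmissionInterface();
            return GFacUtils.getCloudJobSubmission(iface.getJobSubmissionInterfaceId());
        }
    }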

http://git-wip-us.apache.org/repos/asf/airavata/blob/e9ee22b9/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
index 5c5af53..53e0f93 100644
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
@@ -28,6 +28,7 @@ import java.util.Calendar;
 import java.util.List;
 import java.util.Map;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
 import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.gfac.GFacException;
@@ -39,6 +40,10 @@ import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
 import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
+import org.apache.airavata.model.appcatalog.computeresource.CloudJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.ProviderName;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.Ec2ApplicationDeploymentType;
@@ -221,9 +226,8 @@ public class EC2Provider extends AbstractProvider {
                 /* Assuming that there is just a single result. If you want to add more results, update the necessary
                    logic below */
                 String paramName = outparamType.getName();
-                outParam.getType().changeType(StringParameterType.type);
-                ((StringParameterType) outParam.getType()).setValue(executionResult);
-                jobExecutionContext.getOutMessageContext().addParameter(paramName, outParam);
+                String value = outparamType.getValue();
+                jobExecutionContext.getOutMessageContext().addParameter(paramName, value);
             }
             GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.COMPLETE);
         } catch (InvalidSshKeyException e) {
@@ -252,26 +256,28 @@ public class EC2Provider extends AbstractProvider {
      * @throws GFacProviderException GFacProviderException
      */
     private String createShellCmd(JobExecutionContext jobExecutionContext) throws GFacProviderException {
-        String command = "";
-        ApplicationDescription appDesc = jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription();
-
-        if(appDesc.getType() instanceof Ec2ApplicationDeploymentType) {
-            Ec2ApplicationDeploymentType type = (Ec2ApplicationDeploymentType) appDesc.getType();
-            if(type.getExecutable() != null) {
-                command = type.getExecutableType() + " " + type.getExecutable();
+        try {
+            String command = "";
+            JobSubmissionInterface submissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            CloudJobSubmission cloudJobSubmission = GFacUtils.getCloudJobSubmission(submissionInterface.getJobSubmissionInterfaceId());
+            String executablePath = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getExecutablePath();
+            if (cloudJobSubmission.getProviderName().equals(ProviderName.EC2)) {
+                if (cloudJobSubmission.getExecutableType() != null) {
+                    command = cloudJobSubmission.getExecutableType() + " " + executablePath;
+                } else {
+                    command = "sh" + " " + executablePath;
+                }
+                command = setCmdParams(jobExecutionContext, command);
+
             } else {
-                command = "sh" + " " + type.getExecutable();
+                command = "sh" + " " + executablePath;
+                command = setCmdParams(jobExecutionContext, command);
             }
-            command = setCmdParams(jobExecutionContext, command);
-
-        } else {
-            ApplicationDeploymentDescriptionType type = appDesc.getType();
-            command = "sh" + " " + type.getExecutableLocation();
-            command = setCmdParams(jobExecutionContext, command);
+            return command + '\n';
+        } catch (AppCatalogException e) {
+            log.error("Error while retrieving cloud job submission", e);
+            throw new GFacProviderException("Error while retrieving cloud job submission", e);
         }
-
-        return command + '\n';
     }
 
     private String setCmdParams(JobExecutionContext jobExecutionContext, String command) throws GFacProviderException {
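
To make the reworked createShellCmd branching concrete: with the hypothetical values executableType "sh" and executablePath "/home/ec2-user/run.sh", the EC2 branch now yields "sh /home/ec2-user/run.sh", and setCmdParams then appends the input parameters before the trailing newline. A self-contained sketch of just that branching (the example values are assumptions, not taken from any deployment):

    public class Ec2CommandSketch {
        // Mirrors the branch structure added above; not the provider code itself.
        static String buildCommand(boolean ec2Provider, String executableType, String executablePath) {
            String command;
            if (ec2Provider && executableType != null) {
                command = executableType + " " + executablePath;
            } else {
                command = "sh" + " " + executablePath;   // fallback for other cloud providers
            }
            return command + '\n';
        }

        public static void main(String[] args) {
            System.out.print(buildCommand(true, "sh", "/home/ec2-user/run.sh"));
        }
    }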

http://git-wip-us.apache.org/repos/asf/airavata/blob/e9ee22b9/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java b/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
index d558ab9..9f86197 100644
--- a/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
+++ b/modules/gfac/gfac-ec2/src/test/java/org/apache/airavata/gfac/ec2/EC2ProviderTest.java
@@ -1,171 +1,195 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-
-package org.apache.airavata.gfac.ec2;
-
-import org.apache.airavata.commons.gfac.type.*;
-import org.apache.airavata.gfac.GFacConfiguration;
-import org.apache.airavata.gfac.GFacException;
-import org.apache.airavata.gfac.core.context.ApplicationContext;
-import org.apache.airavata.gfac.core.context.JobExecutionContext;
-import org.apache.airavata.gfac.core.context.MessageContext;
-import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
-import org.apache.airavata.schemas.gfac.*;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Your Amazon instance should be in a running state before running this test.
- */
-public class EC2ProviderTest {
-    private JobExecutionContext jobExecutionContext;
-
-    private static final String hostName = "ec2-host";
-
-    private static final String hostAddress = "ec2-address";
-
-    private static final String sequence1 = "RR042383.21413#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-            "CTCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCCTGCGCCCATTGACCAATATTCCTCA" +
-            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAATCTTCCTTTCAGAAG" +
-            "GCTGTCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCCGCCGGTCGCCATCAGTCTTAGCAAGCTAAGACCATGCTGCCCCTGACTTGCATGT" +
-            "GTTAAGCCTGTAGCTTAGCGTTC";
-
-    private static final String sequence2 = "RR042383.31934#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
-            "CCCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCTCTCGCCCATTGACCAATATTCCTCA" +
-            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
-            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAAATCTTCCTTTCAGAA" +
-            "GGCTATCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCG";
-
-    /* Following variables are needed to be set in-order to run the test. Since these are account specific information,
-       I'm not adding the values here. It's the responsibility of the person who's running the test to update
-       these variables accordingly.
-       */
-
-    /* Username used to log into your ec2 instance eg.ec2-user */
-    private String userName = "";
-
-    /* Secret key used to connect to the image */
-    private String secretKey = "";
-
-    /* Access key used to connect to the image */
-    private String accessKey = "";
-
-    /* Instance id of the running instance of your image */
-    private String instanceId = "";
-
-    @Before
-    public void setUp() throws Exception {
-        URL resource = EC2ProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        assert resource != null;
-        System.out.println(resource.getFile());
-        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
-
-        /* EC2 Host */
-        HostDescription host = new HostDescription(Ec2HostType.type);
-        host.getType().setHostName(hostName);
-        host.getType().setHostAddress(hostAddress);
-
-        /* App */
-        ApplicationDescription ec2Desc = new ApplicationDescription(Ec2ApplicationDeploymentType.type);
-        Ec2ApplicationDeploymentType ec2App = (Ec2ApplicationDeploymentType)ec2Desc.getType();
-
-        String serviceName = "Gnome_distance_calculation_workflow";
-        ec2Desc.getType().addNewApplicationName().setStringValue(serviceName);
-        ec2App.setJobType(JobTypeType.EC_2);
-        ec2App.setExecutable("/home/ec2-user/run.sh");
-        ec2App.setExecutableType("sh");
-
-        /* Service */
-        ServiceDescription serv = new ServiceDescription();
-        serv.getType().setName("GenomeEC2");
-
-        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
-
-        InputParameterType input1 = InputParameterType.Factory.newInstance();
-        input1.setParameterName("genome_input1");
-        input1.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input1);
-
-        InputParameterType input2 = InputParameterType.Factory.newInstance();
-        input2.setParameterName("genome_input2");
-        input2.setParameterType(StringParameterType.Factory.newInstance());
-        inputList.add(input2);
-
-        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
-
-        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
-        OutputParameterType output = OutputParameterType.Factory.newInstance();
-        output.setParameterName("genome_output");
-        output.setParameterType(StringParameterType.Factory.newInstance());
-        outputList.add(output);
-
-        OutputParameterType[] outputParamList = outputList
-                .toArray(new OutputParameterType[outputList.size()]);
-
-        serv.getType().setInputParametersArray(inputParamList);
-        serv.getType().setOutputParametersArray(outputParamList);
-
-        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
-        ApplicationContext applicationContext = new ApplicationContext();
-        jobExecutionContext.setApplicationContext(applicationContext);
-        applicationContext.setServiceDescription(serv);
-        applicationContext.setApplicationDeploymentDescription(ec2Desc);
-        applicationContext.setHostDescription(host);
-
-        AmazonSecurityContext amazonSecurityContext =
-                new AmazonSecurityContext(userName, accessKey, secretKey, instanceId);
-        jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT, amazonSecurityContext);
-
-        MessageContext inMessage = new MessageContext();
-        ActualParameter genomeInput1 = new ActualParameter();
-        ((StringParameterType)genomeInput1.getType()).setValue(sequence1);
-        inMessage.addParameter("genome_input1", genomeInput1);
-
-        ActualParameter genomeInput2 = new ActualParameter();
-        ((StringParameterType)genomeInput2.getType()).setValue(sequence2);
-        inMessage.addParameter("genome_input2", genomeInput2);
-
-        MessageContext outMessage = new MessageContext();
-        ActualParameter echo_out = new ActualParameter();
-        outMessage.addParameter("distance", echo_out);
-
-        jobExecutionContext.setInMessageContext(inMessage);
-        jobExecutionContext.setOutMessageContext(outMessage);
-    }
-
-    @Test
-    public void testGramProvider() throws GFacException {
-        BetterGfacImpl gFacAPI = new BetterGfacImpl();
-        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
-        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
-        Assert.assertEquals(MappingFactory.
-                toString((ActualParameter) outMessageContext.getParameter("genome_output")), "476");
-    }
-}
-
-
+///*
+// *
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *   http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// *
+// */
+//
+//package org.apache.airavata.gfac.ec2;
+//
+//import org.airavata.appcatalog.cpi.AppCatalog;
+//import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
+//import org.apache.airavata.commons.gfac.type.*;
+//import org.apache.airavata.gfac.GFacConfiguration;
+//import org.apache.airavata.gfac.GFacException;
+//import org.apache.airavata.gfac.core.context.ApplicationContext;
+//import org.apache.airavata.gfac.core.context.JobExecutionContext;
+//import org.apache.airavata.gfac.core.context.MessageContext;
+//import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
+//import org.apache.airavata.model.appcatalog.computeresource.*;
+//import org.apache.airavata.schemas.gfac.*;
+//import org.junit.Assert;
+//import org.junit.Before;
+//import org.junit.Test;
+//
+//import java.io.File;
+//import java.net.URL;
+//import java.util.ArrayList;
+//import java.util.List;
+//
+///**
+// * Your Amazon instance should be in a running state before running this test.
+// */
+//public class EC2ProviderTest {
+//    private JobExecutionContext jobExecutionContext;
+//
+//    private static final String hostName = "ec2-host";
+//
+//    private static final String hostAddress = "ec2-address";
+//
+//    private static final String sequence1 = "RR042383.21413#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
+//            "CTCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCCTGCGCCCATTGACCAATATTCCTCA" +
+//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
+//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAATCTTCCTTTCAGAAG" +
+//            "GCTGTCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCCGCCGGTCGCCATCAGTCTTAGCAAGCTAAGACCATGCTGCCCCTGACTTGCATGT" +
+//            "GTTAAGCCTGTAGCTTAGCGTTC";
+//
+//    private static final String sequence2 = "RR042383.31934#CTGGCACGGAGTTAGCCGATCCTTATTCATAAAGTACATGCAAACGGGTATCCATA" +
+//            "CCCGACTTTATTCCTTTATAAAAGAAGTTTACAACCCATAGGGCAGTCATCCTTCACGCTACTTGGCTGGTTCAGGCTCTCGCCCATTGACCAATATTCCTCA" +
+//            "CTGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGACTAGGTGGGCCGTTACCCCGC" +
+//            "CTACTATCTAATGGAACGCATCCCCATCGTCTACCGGAATACCTTTAATCATGTGAACATGCGGACTCATGATGCCATCTTGTATTAAATCTTCCTTTCAGAA" +
+//            "GGCTATCCAAGAGTAGACGGCAGGTTGGATACGTGTTACTCACCGTGCG";
+//
+//    /* Following variables are needed to be set in-order to run the test. Since these are account specific information,
+//       I'm not adding the values here. It's the responsibility of the person who's running the test to update
+//       these variables accordingly.
+//       */
+//
+//    /* Username used to log into your ec2 instance eg.ec2-user */
+//    private String userName = "";
+//
+//    /* Secret key used to connect to the image */
+//    private String secretKey = "";
+//
+//    /* Access key used to connect to the image */
+//    private String accessKey = "";
+//
+//    /* Instance id of the running instance of your image */
+//    private String instanceId = "";
+//
+//    @Before
+//    public void setUp() throws Exception {
+//        URL resource = EC2ProviderTest.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+//        assert resource != null;
+//        System.out.println(resource.getFile());
+//        GFacConfiguration gFacConfiguration = GFacConfiguration.create(new File(resource.getPath()), null);
+//
+//        /* EC2 Host */
+//        ComputeResourceDescription host = new ComputeResourceDescription();
+//        host.setHostName(hostName);
+//        host.setResourceDescription("EC2 compute resource");
+//        host.addToIpAddresses(hostAddress);
+//
+//        CloudJobSubmission cloudJobSubmission = new CloudJobSubmission();
+//        cloudJobSubmission.setProviderName(ProviderName.EC2);
+//        cloudJobSubmission.setExecutableType("sh");
+//        cloudJobSubmission.setNodeId(instanceId);
+//        cloudJobSubmission.setSecurityProtocol(SecurityProtocol.USERNAME_PASSWORD);
+//        cloudJobSubmission.setUserAccountName(userName);
+//
+//        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+//        String submissionId = appCatalog.getComputeResource().addCloudJobSubmission(cloudJobSubmission);
+//
+//        JobSubmissionInterface submissionInterface = new JobSubmissionInterface();
+//        submissionInterface.setJobSubmissionInterfaceId(submissionId);
+//        submissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.CLOUD);
+//        submissionInterface.setPriorityOrder(0);
+//
+//        host.addToJobSubmissionInterfaces(submissionInterface);
+//
+//        String computeResourceId = appCatalog.getComputeResource().addComputeResource(host);
+//
+//        /* App */
+//
+//        ApplicationDescription ec2Desc = new ApplicationDescription(Ec2ApplicationDeploymentType.type);
+//        Ec2ApplicationDeploymentType ec2App = (Ec2ApplicationDeploymentType)ec2Desc.getType();
+//
+//        String serviceName = "Gnome_distance_calculation_workflow";
+//        ec2Desc.getType().addNewApplicationName().setStringValue(serviceName);
+//        ec2App.setJobType(JobTypeType.EC_2);
+//        ec2App.setExecutable("/home/ec2-user/run.sh");
+//        ec2App.setExecutableType("sh");
+//
+//        /* Service */
+//        ServiceDescription serv = new ServiceDescription();
+//        serv.getType().setName("GenomeEC2");
+//
+//        List<InputParameterType> inputList = new ArrayList<InputParameterType>();
+//
+//        InputParameterType input1 = InputParameterType.Factory.newInstance();
+//        input1.setParameterName("genome_input1");
+//        input1.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input1);
+//
+//        InputParameterType input2 = InputParameterType.Factory.newInstance();
+//        input2.setParameterName("genome_input2");
+//        input2.setParameterType(StringParameterType.Factory.newInstance());
+//        inputList.add(input2);
+//
+//        InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList.size()]);
+//
+//        List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
+//        OutputParameterType output = OutputParameterType.Factory.newInstance();
+//        output.setParameterName("genome_output");
+//        output.setParameterType(StringParameterType.Factory.newInstance());
+//        outputList.add(output);
+//
+//        OutputParameterType[] outputParamList = outputList
+//                .toArray(new OutputParameterType[outputList.size()]);
+//
+//        serv.getType().setInputParametersArray(inputParamList);
+//        serv.getType().setOutputParametersArray(outputParamList);
+//
+//        jobExecutionContext = new JobExecutionContext(gFacConfiguration,serv.getType().getName());
+//        ApplicationContext applicationContext = new ApplicationContext();
+//        jobExecutionContext.setApplicationContext(applicationContext);
+//        applicationContext.setServiceDescription(serv);
+//        applicationContext.setApplicationDeploymentDescription(ec2Desc);
+//        applicationContext.setHostDescription(host);
+//
+//        AmazonSecurityContext amazonSecurityContext =
+//                new AmazonSecurityContext(userName, accessKey, secretKey, instanceId);
+//        jobExecutionContext.addSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT, amazonSecurityContext);
+//
+//        MessageContext inMessage = new MessageContext();
+//        ActualParameter genomeInput1 = new ActualParameter();
+//        ((StringParameterType)genomeInput1.getType()).setValue(sequence1);
+//        inMessage.addParameter("genome_input1", genomeInput1);
+//
+//        ActualParameter genomeInput2 = new ActualParameter();
+//        ((StringParameterType)genomeInput2.getType()).setValue(sequence2);
+//        inMessage.addParameter("genome_input2", genomeInput2);
+//
+//        MessageContext outMessage = new MessageContext();
+//        ActualParameter echo_out = new ActualParameter();
+//        outMessage.addParameter("distance", echo_out);
+//
+//        jobExecutionContext.setInMessageContext(inMessage);
+//        jobExecutionContext.setOutMessageContext(outMessage);
+//    }
+//
+//    @Test
+//    public void testGramProvider() throws GFacException {
+//        BetterGfacImpl gFacAPI = new BetterGfacImpl();
+//        gFacAPI.submitJob(jobExecutionContext.getExperimentID(), jobExecutionContext.getTaskData().getTaskID(), jobExecutionContext.getGatewayID());
+//        MessageContext outMessageContext = jobExecutionContext.getOutMessageContext();
+//        Assert.assertEquals(MappingFactory.
+//                toString((ActualParameter) outMessageContext.getParameter("genome_output")), "476");
+//    }
+//}
+//
+//


[10/50] [abbrv] airavata git commit: Added monitor mode enum to computeResourceModel.thrift and changed ComputeResourcePreference's preferred JobSubmission and DataMovement protocol types to their enums

Posted by ch...@apache.org.
Added monitor mode enum to computeResourceModel.thrift and changed ComputeResourcePreference's preferred JobSubmission and DataMovement protocol types to their enums


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/96a673f0
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/96a673f0
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/96a673f0

Branch: refs/heads/master
Commit: 96a673f0a51fa8944d5700f61fef089e477f99d8
Parents: a1e0ec8
Author: shamrath <sh...@gmail.com>
Authored: Thu Oct 30 12:20:54 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:14 2014 -0500

----------------------------------------------------------------------
 .../lib/airavata/computeResourceModel_types.cpp | 278 ++++++++++---------
 .../lib/airavata/computeResourceModel_types.h   |  36 ++-
 .../gatewayResourceProfileModel_types.cpp       |  48 ++--
 .../gatewayResourceProfileModel_types.h         |  19 +-
 .../Model/AppCatalog/ComputeResource/Types.php  |  29 ++
 .../Model/AppCatalog/GatewayProfile/Types.php   |  20 +-
 .../appcatalog/computeresource/MonitorMode.java |  73 +++++
 .../computeresource/ResourceJobManager.java     | 121 +++++++-
 .../ComputeResourcePreference.java              |  68 +++--
 .../computeResourceModel.thrift                 |  18 +-
 .../gatewayResourceProfileModel.thrift          |   5 +-
 11 files changed, 511 insertions(+), 204 deletions(-)
----------------------------------------------------------------------
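
For illustration, a minimal Java sketch of what the regenerated beans look like to a caller after this change. MonitorMode.PUSH/PULL and the switch to enum-typed preferred protocols come from the diffs in this commit; the gatewayprofile package name and the exact setter names on ComputeResourcePreference are assumptions based on the diffstat above, so this is a sketch rather than repository code.

    import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
    import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
    import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;

    public class MonitorModeSketch {
        public static void main(String[] args) {
            // ResourceJobManager now carries an optional monitor mode (PUSH or PULL).
            ResourceJobManager rjm = new ResourceJobManager();
            rjm.setMonitorMode(MonitorMode.PULL);

            // The preferred protocols on a compute resource preference are now enums,
            // not plain strings.
            ComputeResourcePreference pref = new ComputeResourcePreference();
            pref.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
            pref.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);
        }
    }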


http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
index 2555cb8..27f62dd 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.cpp
@@ -61,6 +61,16 @@ const char* _kJobManagerCommandNames[] = {
 };
 const std::map<int, const char*> _JobManagerCommand_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(7, _kJobManagerCommandValues, _kJobManagerCommandNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
+int _kMonitorModeValues[] = {
+  MonitorMode::PUSH,
+  MonitorMode::PULL
+};
+const char* _kMonitorModeNames[] = {
+  "PUSH",
+  "PULL"
+};
+const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(2, _kMonitorModeValues, _kMonitorModeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+
 int _kFileSystemsValues[] = {
   FileSystems::HOME,
   FileSystems::WORK,
@@ -137,8 +147,8 @@ const char* _kProviderNameNames[] = {
 };
 const std::map<int, const char*> _ProviderName_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(3, _kProviderNameValues, _kProviderNameNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
-const char* ResourceJobManager::ascii_fingerprint = "F61CAF80247D0E44C8D52504F3A43BED";
-const uint8_t ResourceJobManager::binary_fingerprint[16] = {0xF6,0x1C,0xAF,0x80,0x24,0x7D,0x0E,0x44,0xC8,0xD5,0x25,0x04,0xF3,0xA4,0x3B,0xED};
+const char* ResourceJobManager::ascii_fingerprint = "83F3E1FB1C076C79A1E733A1E531B938";
+const uint8_t ResourceJobManager::binary_fingerprint[16] = {0x83,0xF3,0xE1,0xFB,0x1C,0x07,0x6C,0x79,0xA1,0xE7,0x33,0xA1,0xE5,0x31,0xB9,0x38};
 
 uint32_t ResourceJobManager::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -221,6 +231,16 @@ uint32_t ResourceJobManager::read(::apache::thrift::protocol::TProtocol* iprot)
           xfer += iprot->skip(ftype);
         }
         break;
+      case 6:
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast9;
+          xfer += iprot->readI32(ecast9);
+          this->monitorMode = (MonitorMode::type)ecast9;
+          this->__isset.monitorMode = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -263,16 +283,21 @@ uint32_t ResourceJobManager::write(::apache::thrift::protocol::TProtocol* oprot)
     xfer += oprot->writeFieldBegin("jobManagerCommands", ::apache::thrift::protocol::T_MAP, 5);
     {
       xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_I32, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->jobManagerCommands.size()));
-      std::map<JobManagerCommand::type, std::string> ::const_iterator _iter9;
-      for (_iter9 = this->jobManagerCommands.begin(); _iter9 != this->jobManagerCommands.end(); ++_iter9)
+      std::map<JobManagerCommand::type, std::string> ::const_iterator _iter10;
+      for (_iter10 = this->jobManagerCommands.begin(); _iter10 != this->jobManagerCommands.end(); ++_iter10)
       {
-        xfer += oprot->writeI32((int32_t)_iter9->first);
-        xfer += oprot->writeString(_iter9->second);
+        xfer += oprot->writeI32((int32_t)_iter10->first);
+        xfer += oprot->writeString(_iter10->second);
       }
       xfer += oprot->writeMapEnd();
     }
     xfer += oprot->writeFieldEnd();
   }
+  if (this->__isset.monitorMode) {
+    xfer += oprot->writeFieldBegin("monitorMode", ::apache::thrift::protocol::T_I32, 6);
+    xfer += oprot->writeI32((int32_t)this->monitorMode);
+    xfer += oprot->writeFieldEnd();
+  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -285,6 +310,7 @@ void swap(ResourceJobManager &a, ResourceJobManager &b) {
   swap(a.pushMonitoringEndpoint, b.pushMonitoringEndpoint);
   swap(a.jobManagerBinPath, b.jobManagerBinPath);
   swap(a.jobManagerCommands, b.jobManagerCommands);
+  swap(a.monitorMode, b.monitorMode);
   swap(a.__isset, b.__isset);
 }
 
@@ -458,9 +484,9 @@ uint32_t SCPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast10;
-          xfer += iprot->readI32(ecast10);
-          this->securityProtocol = (SecurityProtocol::type)ecast10;
+          int32_t ecast11;
+          xfer += iprot->readI32(ecast11);
+          this->securityProtocol = (SecurityProtocol::type)ecast11;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -570,9 +596,9 @@ uint32_t GridFTPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast11;
-          xfer += iprot->readI32(ecast11);
-          this->securityProtocol = (SecurityProtocol::type)ecast11;
+          int32_t ecast12;
+          xfer += iprot->readI32(ecast12);
+          this->securityProtocol = (SecurityProtocol::type)ecast12;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -582,14 +608,14 @@ uint32_t GridFTPDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->gridFTPEndPoints.clear();
-            uint32_t _size12;
-            ::apache::thrift::protocol::TType _etype15;
-            xfer += iprot->readListBegin(_etype15, _size12);
-            this->gridFTPEndPoints.resize(_size12);
-            uint32_t _i16;
-            for (_i16 = 0; _i16 < _size12; ++_i16)
+            uint32_t _size13;
+            ::apache::thrift::protocol::TType _etype16;
+            xfer += iprot->readListBegin(_etype16, _size13);
+            this->gridFTPEndPoints.resize(_size13);
+            uint32_t _i17;
+            for (_i17 = 0; _i17 < _size13; ++_i17)
             {
-              xfer += iprot->readString(this->gridFTPEndPoints[_i16]);
+              xfer += iprot->readString(this->gridFTPEndPoints[_i17]);
             }
             xfer += iprot->readListEnd();
           }
@@ -631,10 +657,10 @@ uint32_t GridFTPDataMovement::write(::apache::thrift::protocol::TProtocol* oprot
   xfer += oprot->writeFieldBegin("gridFTPEndPoints", ::apache::thrift::protocol::T_LIST, 3);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->gridFTPEndPoints.size()));
-    std::vector<std::string> ::const_iterator _iter17;
-    for (_iter17 = this->gridFTPEndPoints.begin(); _iter17 != this->gridFTPEndPoints.end(); ++_iter17)
+    std::vector<std::string> ::const_iterator _iter18;
+    for (_iter18 = this->gridFTPEndPoints.begin(); _iter18 != this->gridFTPEndPoints.end(); ++_iter18)
     {
-      xfer += oprot->writeString((*_iter17));
+      xfer += oprot->writeString((*_iter18));
     }
     xfer += oprot->writeListEnd();
   }
@@ -688,9 +714,9 @@ uint32_t UnicoreDataMovement::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast18;
-          xfer += iprot->readI32(ecast18);
-          this->securityProtocol = (SecurityProtocol::type)ecast18;
+          int32_t ecast19;
+          xfer += iprot->readI32(ecast19);
+          this->securityProtocol = (SecurityProtocol::type)ecast19;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -750,8 +776,8 @@ void swap(UnicoreDataMovement &a, UnicoreDataMovement &b) {
   swap(a.unicoreEndPointURL, b.unicoreEndPointURL);
 }
 
-const char* LOCALSubmission::ascii_fingerprint = "A5A35C842CBE1CA9D6A13C5974C6FB8F";
-const uint8_t LOCALSubmission::binary_fingerprint[16] = {0xA5,0xA3,0x5C,0x84,0x2C,0xBE,0x1C,0xA9,0xD6,0xA1,0x3C,0x59,0x74,0xC6,0xFB,0x8F};
+const char* LOCALSubmission::ascii_fingerprint = "D51508D1A661370F4785A01334DB8637";
+const uint8_t LOCALSubmission::binary_fingerprint[16] = {0xD5,0x15,0x08,0xD1,0xA6,0x61,0x37,0x0F,0x47,0x85,0xA0,0x13,0x34,0xDB,0x86,0x37};
 
 uint32_t LOCALSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -894,8 +920,8 @@ void swap(LOCALDataMovement &a, LOCALDataMovement &b) {
   swap(a.dataMovementInterfaceId, b.dataMovementInterfaceId);
 }
 
-const char* SSHJobSubmission::ascii_fingerprint = "8BC403A3B093DDB0CB8F04ED699DBA3D";
-const uint8_t SSHJobSubmission::binary_fingerprint[16] = {0x8B,0xC4,0x03,0xA3,0xB0,0x93,0xDD,0xB0,0xCB,0x8F,0x04,0xED,0x69,0x9D,0xBA,0x3D};
+const char* SSHJobSubmission::ascii_fingerprint = "BCAF073DD81C8F6A9ED716A45569D2B3";
+const uint8_t SSHJobSubmission::binary_fingerprint[16] = {0xBC,0xAF,0x07,0x3D,0xD8,0x1C,0x8F,0x6A,0x9E,0xD7,0x16,0xA4,0x55,0x69,0xD2,0xB3};
 
 uint32_t SSHJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -930,9 +956,9 @@ uint32_t SSHJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot) {
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast19;
-          xfer += iprot->readI32(ecast19);
-          this->securityProtocol = (SecurityProtocol::type)ecast19;
+          int32_t ecast20;
+          xfer += iprot->readI32(ecast20);
+          this->securityProtocol = (SecurityProtocol::type)ecast20;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1056,9 +1082,9 @@ uint32_t GlobusJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast20;
-          xfer += iprot->readI32(ecast20);
-          this->securityProtocol = (SecurityProtocol::type)ecast20;
+          int32_t ecast21;
+          xfer += iprot->readI32(ecast21);
+          this->securityProtocol = (SecurityProtocol::type)ecast21;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1068,14 +1094,14 @@ uint32_t GlobusJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot)
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->globusGateKeeperEndPoint.clear();
-            uint32_t _size21;
-            ::apache::thrift::protocol::TType _etype24;
-            xfer += iprot->readListBegin(_etype24, _size21);
-            this->globusGateKeeperEndPoint.resize(_size21);
-            uint32_t _i25;
-            for (_i25 = 0; _i25 < _size21; ++_i25)
+            uint32_t _size22;
+            ::apache::thrift::protocol::TType _etype25;
+            xfer += iprot->readListBegin(_etype25, _size22);
+            this->globusGateKeeperEndPoint.resize(_size22);
+            uint32_t _i26;
+            for (_i26 = 0; _i26 < _size22; ++_i26)
             {
-              xfer += iprot->readString(this->globusGateKeeperEndPoint[_i25]);
+              xfer += iprot->readString(this->globusGateKeeperEndPoint[_i26]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1116,10 +1142,10 @@ uint32_t GlobusJobSubmission::write(::apache::thrift::protocol::TProtocol* oprot
     xfer += oprot->writeFieldBegin("globusGateKeeperEndPoint", ::apache::thrift::protocol::T_LIST, 3);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->globusGateKeeperEndPoint.size()));
-      std::vector<std::string> ::const_iterator _iter26;
-      for (_iter26 = this->globusGateKeeperEndPoint.begin(); _iter26 != this->globusGateKeeperEndPoint.end(); ++_iter26)
+      std::vector<std::string> ::const_iterator _iter27;
+      for (_iter27 = this->globusGateKeeperEndPoint.begin(); _iter27 != this->globusGateKeeperEndPoint.end(); ++_iter27)
       {
-        xfer += oprot->writeString((*_iter26));
+        xfer += oprot->writeString((*_iter27));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1174,9 +1200,9 @@ uint32_t UnicoreJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast27;
-          xfer += iprot->readI32(ecast27);
-          this->securityProtocol = (SecurityProtocol::type)ecast27;
+          int32_t ecast28;
+          xfer += iprot->readI32(ecast28);
+          this->securityProtocol = (SecurityProtocol::type)ecast28;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1275,9 +1301,9 @@ uint32_t CloudJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast28;
-          xfer += iprot->readI32(ecast28);
-          this->securityProtocol = (SecurityProtocol::type)ecast28;
+          int32_t ecast29;
+          xfer += iprot->readI32(ecast29);
+          this->securityProtocol = (SecurityProtocol::type)ecast29;
           isset_securityProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1301,9 +1327,9 @@ uint32_t CloudJobSubmission::read(::apache::thrift::protocol::TProtocol* iprot)
         break;
       case 5:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast29;
-          xfer += iprot->readI32(ecast29);
-          this->providerName = (ProviderName::type)ecast29;
+          int32_t ecast30;
+          xfer += iprot->readI32(ecast30);
+          this->providerName = (ProviderName::type)ecast30;
           isset_providerName = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1420,9 +1446,9 @@ uint32_t JobSubmissionInterface::read(::apache::thrift::protocol::TProtocol* ipr
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast30;
-          xfer += iprot->readI32(ecast30);
-          this->jobSubmissionProtocol = (JobSubmissionProtocol::type)ecast30;
+          int32_t ecast31;
+          xfer += iprot->readI32(ecast31);
+          this->jobSubmissionProtocol = (JobSubmissionProtocol::type)ecast31;
           isset_jobSubmissionProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1518,9 +1544,9 @@ uint32_t DataMovementInterface::read(::apache::thrift::protocol::TProtocol* ipro
         break;
       case 2:
         if (ftype == ::apache::thrift::protocol::T_I32) {
-          int32_t ecast31;
-          xfer += iprot->readI32(ecast31);
-          this->dataMovementProtocol = (DataMovementProtocol::type)ecast31;
+          int32_t ecast32;
+          xfer += iprot->readI32(ecast32);
+          this->dataMovementProtocol = (DataMovementProtocol::type)ecast32;
           isset_dataMovementProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -1625,14 +1651,14 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->hostAliases.clear();
-            uint32_t _size32;
-            ::apache::thrift::protocol::TType _etype35;
-            xfer += iprot->readListBegin(_etype35, _size32);
-            this->hostAliases.resize(_size32);
-            uint32_t _i36;
-            for (_i36 = 0; _i36 < _size32; ++_i36)
+            uint32_t _size33;
+            ::apache::thrift::protocol::TType _etype36;
+            xfer += iprot->readListBegin(_etype36, _size33);
+            this->hostAliases.resize(_size33);
+            uint32_t _i37;
+            for (_i37 = 0; _i37 < _size33; ++_i37)
             {
-              xfer += iprot->readString(this->hostAliases[_i36]);
+              xfer += iprot->readString(this->hostAliases[_i37]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1645,14 +1671,14 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->ipAddresses.clear();
-            uint32_t _size37;
-            ::apache::thrift::protocol::TType _etype40;
-            xfer += iprot->readListBegin(_etype40, _size37);
-            this->ipAddresses.resize(_size37);
-            uint32_t _i41;
-            for (_i41 = 0; _i41 < _size37; ++_i41)
+            uint32_t _size38;
+            ::apache::thrift::protocol::TType _etype41;
+            xfer += iprot->readListBegin(_etype41, _size38);
+            this->ipAddresses.resize(_size38);
+            uint32_t _i42;
+            for (_i42 = 0; _i42 < _size38; ++_i42)
             {
-              xfer += iprot->readString(this->ipAddresses[_i41]);
+              xfer += iprot->readString(this->ipAddresses[_i42]);
             }
             xfer += iprot->readListEnd();
           }
@@ -1673,14 +1699,14 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->batchQueues.clear();
-            uint32_t _size42;
-            ::apache::thrift::protocol::TType _etype45;
-            xfer += iprot->readListBegin(_etype45, _size42);
-            this->batchQueues.resize(_size42);
-            uint32_t _i46;
-            for (_i46 = 0; _i46 < _size42; ++_i46)
+            uint32_t _size43;
+            ::apache::thrift::protocol::TType _etype46;
+            xfer += iprot->readListBegin(_etype46, _size43);
+            this->batchQueues.resize(_size43);
+            uint32_t _i47;
+            for (_i47 = 0; _i47 < _size43; ++_i47)
             {
-              xfer += this->batchQueues[_i46].read(iprot);
+              xfer += this->batchQueues[_i47].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -1693,19 +1719,19 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_MAP) {
           {
             this->fileSystems.clear();
-            uint32_t _size47;
-            ::apache::thrift::protocol::TType _ktype48;
-            ::apache::thrift::protocol::TType _vtype49;
-            xfer += iprot->readMapBegin(_ktype48, _vtype49, _size47);
-            uint32_t _i51;
-            for (_i51 = 0; _i51 < _size47; ++_i51)
+            uint32_t _size48;
+            ::apache::thrift::protocol::TType _ktype49;
+            ::apache::thrift::protocol::TType _vtype50;
+            xfer += iprot->readMapBegin(_ktype49, _vtype50, _size48);
+            uint32_t _i52;
+            for (_i52 = 0; _i52 < _size48; ++_i52)
             {
-              FileSystems::type _key52;
-              int32_t ecast54;
-              xfer += iprot->readI32(ecast54);
-              _key52 = (FileSystems::type)ecast54;
-              std::string& _val53 = this->fileSystems[_key52];
-              xfer += iprot->readString(_val53);
+              FileSystems::type _key53;
+              int32_t ecast55;
+              xfer += iprot->readI32(ecast55);
+              _key53 = (FileSystems::type)ecast55;
+              std::string& _val54 = this->fileSystems[_key53];
+              xfer += iprot->readString(_val54);
             }
             xfer += iprot->readMapEnd();
           }
@@ -1718,14 +1744,14 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->jobSubmissionInterfaces.clear();
-            uint32_t _size55;
-            ::apache::thrift::protocol::TType _etype58;
-            xfer += iprot->readListBegin(_etype58, _size55);
-            this->jobSubmissionInterfaces.resize(_size55);
-            uint32_t _i59;
-            for (_i59 = 0; _i59 < _size55; ++_i59)
+            uint32_t _size56;
+            ::apache::thrift::protocol::TType _etype59;
+            xfer += iprot->readListBegin(_etype59, _size56);
+            this->jobSubmissionInterfaces.resize(_size56);
+            uint32_t _i60;
+            for (_i60 = 0; _i60 < _size56; ++_i60)
             {
-              xfer += this->jobSubmissionInterfaces[_i59].read(iprot);
+              xfer += this->jobSubmissionInterfaces[_i60].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -1738,14 +1764,14 @@ uint32_t ComputeResourceDescription::read(::apache::thrift::protocol::TProtocol*
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->dataMovementInterfaces.clear();
-            uint32_t _size60;
-            ::apache::thrift::protocol::TType _etype63;
-            xfer += iprot->readListBegin(_etype63, _size60);
-            this->dataMovementInterfaces.resize(_size60);
-            uint32_t _i64;
-            for (_i64 = 0; _i64 < _size60; ++_i64)
+            uint32_t _size61;
+            ::apache::thrift::protocol::TType _etype64;
+            xfer += iprot->readListBegin(_etype64, _size61);
+            this->dataMovementInterfaces.resize(_size61);
+            uint32_t _i65;
+            for (_i65 = 0; _i65 < _size61; ++_i65)
             {
-              xfer += this->dataMovementInterfaces[_i64].read(iprot);
+              xfer += this->dataMovementInterfaces[_i65].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -1786,10 +1812,10 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("hostAliases", ::apache::thrift::protocol::T_LIST, 3);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->hostAliases.size()));
-      std::vector<std::string> ::const_iterator _iter65;
-      for (_iter65 = this->hostAliases.begin(); _iter65 != this->hostAliases.end(); ++_iter65)
+      std::vector<std::string> ::const_iterator _iter66;
+      for (_iter66 = this->hostAliases.begin(); _iter66 != this->hostAliases.end(); ++_iter66)
       {
-        xfer += oprot->writeString((*_iter65));
+        xfer += oprot->writeString((*_iter66));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1799,10 +1825,10 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("ipAddresses", ::apache::thrift::protocol::T_LIST, 4);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->ipAddresses.size()));
-      std::vector<std::string> ::const_iterator _iter66;
-      for (_iter66 = this->ipAddresses.begin(); _iter66 != this->ipAddresses.end(); ++_iter66)
+      std::vector<std::string> ::const_iterator _iter67;
+      for (_iter67 = this->ipAddresses.begin(); _iter67 != this->ipAddresses.end(); ++_iter67)
       {
-        xfer += oprot->writeString((*_iter66));
+        xfer += oprot->writeString((*_iter67));
       }
       xfer += oprot->writeListEnd();
     }
@@ -1817,10 +1843,10 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("batchQueues", ::apache::thrift::protocol::T_LIST, 6);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->batchQueues.size()));
-      std::vector<BatchQueue> ::const_iterator _iter67;
-      for (_iter67 = this->batchQueues.begin(); _iter67 != this->batchQueues.end(); ++_iter67)
+      std::vector<BatchQueue> ::const_iterator _iter68;
+      for (_iter68 = this->batchQueues.begin(); _iter68 != this->batchQueues.end(); ++_iter68)
       {
-        xfer += (*_iter67).write(oprot);
+        xfer += (*_iter68).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -1830,11 +1856,11 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("fileSystems", ::apache::thrift::protocol::T_MAP, 7);
     {
       xfer += oprot->writeMapBegin(::apache::thrift::protocol::T_I32, ::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->fileSystems.size()));
-      std::map<FileSystems::type, std::string> ::const_iterator _iter68;
-      for (_iter68 = this->fileSystems.begin(); _iter68 != this->fileSystems.end(); ++_iter68)
+      std::map<FileSystems::type, std::string> ::const_iterator _iter69;
+      for (_iter69 = this->fileSystems.begin(); _iter69 != this->fileSystems.end(); ++_iter69)
       {
-        xfer += oprot->writeI32((int32_t)_iter68->first);
-        xfer += oprot->writeString(_iter68->second);
+        xfer += oprot->writeI32((int32_t)_iter69->first);
+        xfer += oprot->writeString(_iter69->second);
       }
       xfer += oprot->writeMapEnd();
     }
@@ -1844,10 +1870,10 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("jobSubmissionInterfaces", ::apache::thrift::protocol::T_LIST, 8);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->jobSubmissionInterfaces.size()));
-      std::vector<JobSubmissionInterface> ::const_iterator _iter69;
-      for (_iter69 = this->jobSubmissionInterfaces.begin(); _iter69 != this->jobSubmissionInterfaces.end(); ++_iter69)
+      std::vector<JobSubmissionInterface> ::const_iterator _iter70;
+      for (_iter70 = this->jobSubmissionInterfaces.begin(); _iter70 != this->jobSubmissionInterfaces.end(); ++_iter70)
       {
-        xfer += (*_iter69).write(oprot);
+        xfer += (*_iter70).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }
@@ -1857,10 +1883,10 @@ uint32_t ComputeResourceDescription::write(::apache::thrift::protocol::TProtocol
     xfer += oprot->writeFieldBegin("dataMovementInterfaces", ::apache::thrift::protocol::T_LIST, 9);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->dataMovementInterfaces.size()));
-      std::vector<DataMovementInterface> ::const_iterator _iter70;
-      for (_iter70 = this->dataMovementInterfaces.begin(); _iter70 != this->dataMovementInterfaces.end(); ++_iter70)
+      std::vector<DataMovementInterface> ::const_iterator _iter71;
+      for (_iter71 = this->dataMovementInterfaces.begin(); _iter71 != this->dataMovementInterfaces.end(); ++_iter71)
       {
-        xfer += (*_iter70).write(oprot);
+        xfer += (*_iter71).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
index c69be3f..e94520d 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/computeResourceModel_types.h
@@ -59,6 +59,15 @@ struct JobManagerCommand {
 
 extern const std::map<int, const char*> _JobManagerCommand_VALUES_TO_NAMES;
 
+struct MonitorMode {
+  enum type {
+    PUSH = 0,
+    PULL = 1
+  };
+};
+
+extern const std::map<int, const char*> _MonitorMode_VALUES_TO_NAMES;
+
 struct FileSystems {
   enum type {
     HOME = 0,
@@ -118,19 +127,20 @@ struct ProviderName {
 extern const std::map<int, const char*> _ProviderName_VALUES_TO_NAMES;
 
 typedef struct _ResourceJobManager__isset {
-  _ResourceJobManager__isset() : pushMonitoringEndpoint(false), jobManagerBinPath(false), jobManagerCommands(false) {}
+  _ResourceJobManager__isset() : pushMonitoringEndpoint(false), jobManagerBinPath(false), jobManagerCommands(false), monitorMode(false) {}
   bool pushMonitoringEndpoint;
   bool jobManagerBinPath;
   bool jobManagerCommands;
+  bool monitorMode;
 } _ResourceJobManager__isset;
 
 class ResourceJobManager {
  public:
 
-  static const char* ascii_fingerprint; // = "F61CAF80247D0E44C8D52504F3A43BED";
-  static const uint8_t binary_fingerprint[16]; // = {0xF6,0x1C,0xAF,0x80,0x24,0x7D,0x0E,0x44,0xC8,0xD5,0x25,0x04,0xF3,0xA4,0x3B,0xED};
+  static const char* ascii_fingerprint; // = "83F3E1FB1C076C79A1E733A1E531B938";
+  static const uint8_t binary_fingerprint[16]; // = {0x83,0xF3,0xE1,0xFB,0x1C,0x07,0x6C,0x79,0xA1,0xE7,0x33,0xA1,0xE5,0x31,0xB9,0x38};
 
-  ResourceJobManager() : resourceJobManagerId("DO_NOT_SET_AT_CLIENTS"), resourceJobManagerType((ResourceJobManagerType::type)0), pushMonitoringEndpoint(), jobManagerBinPath() {
+  ResourceJobManager() : resourceJobManagerId("DO_NOT_SET_AT_CLIENTS"), resourceJobManagerType((ResourceJobManagerType::type)0), pushMonitoringEndpoint(), jobManagerBinPath(), monitorMode((MonitorMode::type)0) {
   }
 
   virtual ~ResourceJobManager() throw() {}
@@ -140,6 +150,7 @@ class ResourceJobManager {
   std::string pushMonitoringEndpoint;
   std::string jobManagerBinPath;
   std::map<JobManagerCommand::type, std::string>  jobManagerCommands;
+  MonitorMode::type monitorMode;
 
   _ResourceJobManager__isset __isset;
 
@@ -166,6 +177,11 @@ class ResourceJobManager {
     __isset.jobManagerCommands = true;
   }
 
+  void __set_monitorMode(const MonitorMode::type val) {
+    monitorMode = val;
+    __isset.monitorMode = true;
+  }
+
   bool operator == (const ResourceJobManager & rhs) const
   {
     if (!(resourceJobManagerId == rhs.resourceJobManagerId))
@@ -184,6 +200,10 @@ class ResourceJobManager {
       return false;
     else if (__isset.jobManagerCommands && !(jobManagerCommands == rhs.jobManagerCommands))
       return false;
+    if (__isset.monitorMode != rhs.__isset.monitorMode)
+      return false;
+    else if (__isset.monitorMode && !(monitorMode == rhs.monitorMode))
+      return false;
     return true;
   }
   bool operator != (const ResourceJobManager &rhs) const {
@@ -473,8 +493,8 @@ void swap(UnicoreDataMovement &a, UnicoreDataMovement &b);
 class LOCALSubmission {
  public:
 
-  static const char* ascii_fingerprint; // = "A5A35C842CBE1CA9D6A13C5974C6FB8F";
-  static const uint8_t binary_fingerprint[16]; // = {0xA5,0xA3,0x5C,0x84,0x2C,0xBE,0x1C,0xA9,0xD6,0xA1,0x3C,0x59,0x74,0xC6,0xFB,0x8F};
+  static const char* ascii_fingerprint; // = "D51508D1A661370F4785A01334DB8637";
+  static const uint8_t binary_fingerprint[16]; // = {0xD5,0x15,0x08,0xD1,0xA6,0x61,0x37,0x0F,0x47,0x85,0xA0,0x13,0x34,0xDB,0x86,0x37};
 
   LOCALSubmission() : jobSubmissionInterfaceId("DO_NOT_SET_AT_CLIENTS") {
   }
@@ -559,8 +579,8 @@ typedef struct _SSHJobSubmission__isset {
 class SSHJobSubmission {
  public:
 
-  static const char* ascii_fingerprint; // = "8BC403A3B093DDB0CB8F04ED699DBA3D";
-  static const uint8_t binary_fingerprint[16]; // = {0x8B,0xC4,0x03,0xA3,0xB0,0x93,0xDD,0xB0,0xCB,0x8F,0x04,0xED,0x69,0x9D,0xBA,0x3D};
+  static const char* ascii_fingerprint; // = "BCAF073DD81C8F6A9ED716A45569D2B3";
+  static const uint8_t binary_fingerprint[16]; // = {0xBC,0xAF,0x07,0x3D,0xD8,0x1C,0x8F,0x6A,0x9E,0xD7,0x16,0xA4,0x55,0x69,0xD2,0xB3};
 
   SSHJobSubmission() : jobSubmissionInterfaceId("DO_NOT_SET_AT_CLIENTS"), securityProtocol((SecurityProtocol::type)0), alternativeSSHHostName(), sshPort(22) {
   }

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.cpp
index 715a346..a996421 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.cpp
@@ -27,8 +27,8 @@
 
 namespace apache { namespace airavata { namespace model { namespace appcatalog { namespace gatewayprofile {
 
-const char* ComputeResourcePreference::ascii_fingerprint = "9C98338B7E052CD4DEECB22F243D6DAE";
-const uint8_t ComputeResourcePreference::binary_fingerprint[16] = {0x9C,0x98,0x33,0x8B,0x7E,0x05,0x2C,0xD4,0xDE,0xEC,0xB2,0x2F,0x24,0x3D,0x6D,0xAE};
+const char* ComputeResourcePreference::ascii_fingerprint = "365108C84A2E160D53CD17C2A7F06F5C";
+const uint8_t ComputeResourcePreference::binary_fingerprint[16] = {0x36,0x51,0x08,0xC8,0x4A,0x2E,0x16,0x0D,0x53,0xCD,0x17,0xC2,0xA7,0xF0,0x6F,0x5C};
 
 uint32_t ComputeResourcePreference::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -69,16 +69,20 @@ uint32_t ComputeResourcePreference::read(::apache::thrift::protocol::TProtocol*
         }
         break;
       case 3:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->preferredJobSubmissionProtocol);
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast0;
+          xfer += iprot->readI32(ecast0);
+          this->preferredJobSubmissionProtocol = ( ::apache::airavata::model::appcatalog::computeresource::JobSubmissionProtocol::type)ecast0;
           this->__isset.preferredJobSubmissionProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
         }
         break;
       case 4:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->preferredDataMovementProtocol);
+        if (ftype == ::apache::thrift::protocol::T_I32) {
+          int32_t ecast1;
+          xfer += iprot->readI32(ecast1);
+          this->preferredDataMovementProtocol = ( ::apache::airavata::model::appcatalog::computeresource::DataMovementProtocol::type)ecast1;
           this->__isset.preferredDataMovementProtocol = true;
         } else {
           xfer += iprot->skip(ftype);
@@ -137,13 +141,13 @@ uint32_t ComputeResourcePreference::write(::apache::thrift::protocol::TProtocol*
   xfer += oprot->writeFieldEnd();
 
   if (this->__isset.preferredJobSubmissionProtocol) {
-    xfer += oprot->writeFieldBegin("preferredJobSubmissionProtocol", ::apache::thrift::protocol::T_STRING, 3);
-    xfer += oprot->writeString(this->preferredJobSubmissionProtocol);
+    xfer += oprot->writeFieldBegin("preferredJobSubmissionProtocol", ::apache::thrift::protocol::T_I32, 3);
+    xfer += oprot->writeI32((int32_t)this->preferredJobSubmissionProtocol);
     xfer += oprot->writeFieldEnd();
   }
   if (this->__isset.preferredDataMovementProtocol) {
-    xfer += oprot->writeFieldBegin("preferredDataMovementProtocol", ::apache::thrift::protocol::T_STRING, 4);
-    xfer += oprot->writeString(this->preferredDataMovementProtocol);
+    xfer += oprot->writeFieldBegin("preferredDataMovementProtocol", ::apache::thrift::protocol::T_I32, 4);
+    xfer += oprot->writeI32((int32_t)this->preferredDataMovementProtocol);
     xfer += oprot->writeFieldEnd();
   }
   if (this->__isset.preferredBatchQueue) {
@@ -178,8 +182,8 @@ void swap(ComputeResourcePreference &a, ComputeResourcePreference &b) {
   swap(a.__isset, b.__isset);
 }
 
-const char* GatewayResourceProfile::ascii_fingerprint = "D6477904C48AAB4DC8F09369D670B400";
-const uint8_t GatewayResourceProfile::binary_fingerprint[16] = {0xD6,0x47,0x79,0x04,0xC4,0x8A,0xAB,0x4D,0xC8,0xF0,0x93,0x69,0xD6,0x70,0xB4,0x00};
+const char* GatewayResourceProfile::ascii_fingerprint = "42DA2625493A482A59D0742432A025BD";
+const uint8_t GatewayResourceProfile::binary_fingerprint[16] = {0x42,0xDA,0x26,0x25,0x49,0x3A,0x48,0x2A,0x59,0xD0,0x74,0x24,0x32,0xA0,0x25,0xBD};
 
 uint32_t GatewayResourceProfile::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -231,14 +235,14 @@ uint32_t GatewayResourceProfile::read(::apache::thrift::protocol::TProtocol* ipr
         if (ftype == ::apache::thrift::protocol::T_LIST) {
           {
             this->computeResourcePreferences.clear();
-            uint32_t _size0;
-            ::apache::thrift::protocol::TType _etype3;
-            xfer += iprot->readListBegin(_etype3, _size0);
-            this->computeResourcePreferences.resize(_size0);
-            uint32_t _i4;
-            for (_i4 = 0; _i4 < _size0; ++_i4)
+            uint32_t _size2;
+            ::apache::thrift::protocol::TType _etype5;
+            xfer += iprot->readListBegin(_etype5, _size2);
+            this->computeResourcePreferences.resize(_size2);
+            uint32_t _i6;
+            for (_i6 = 0; _i6 < _size2; ++_i6)
             {
-              xfer += this->computeResourcePreferences[_i4].read(iprot);
+              xfer += this->computeResourcePreferences[_i6].read(iprot);
             }
             xfer += iprot->readListEnd();
           }
@@ -284,10 +288,10 @@ uint32_t GatewayResourceProfile::write(::apache::thrift::protocol::TProtocol* op
     xfer += oprot->writeFieldBegin("computeResourcePreferences", ::apache::thrift::protocol::T_LIST, 4);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->computeResourcePreferences.size()));
-      std::vector<ComputeResourcePreference> ::const_iterator _iter5;
-      for (_iter5 = this->computeResourcePreferences.begin(); _iter5 != this->computeResourcePreferences.end(); ++_iter5)
+      std::vector<ComputeResourcePreference> ::const_iterator _iter7;
+      for (_iter7 = this->computeResourcePreferences.begin(); _iter7 != this->computeResourcePreferences.end(); ++_iter7)
       {
-        xfer += (*_iter5).write(oprot);
+        xfer += (*_iter7).write(oprot);
       }
       xfer += oprot->writeListEnd();
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.h
index 8a0a002..db18209 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/gatewayResourceProfileModel_types.h
@@ -30,6 +30,7 @@
 #include <thrift/transport/TTransport.h>
 
 #include <thrift/cxxfunctional.h>
+#include "computeResourceModel_types.h"
 
 
 namespace apache { namespace airavata { namespace model { namespace appcatalog { namespace gatewayprofile {
@@ -46,18 +47,18 @@ typedef struct _ComputeResourcePreference__isset {
 class ComputeResourcePreference {
  public:
 
-  static const char* ascii_fingerprint; // = "9C98338B7E052CD4DEECB22F243D6DAE";
-  static const uint8_t binary_fingerprint[16]; // = {0x9C,0x98,0x33,0x8B,0x7E,0x05,0x2C,0xD4,0xDE,0xEC,0xB2,0x2F,0x24,0x3D,0x6D,0xAE};
+  static const char* ascii_fingerprint; // = "365108C84A2E160D53CD17C2A7F06F5C";
+  static const uint8_t binary_fingerprint[16]; // = {0x36,0x51,0x08,0xC8,0x4A,0x2E,0x16,0x0D,0x53,0xCD,0x17,0xC2,0xA7,0xF0,0x6F,0x5C};
 
-  ComputeResourcePreference() : computeResourceId(), overridebyAiravata(true), preferredJobSubmissionProtocol(), preferredDataMovementProtocol(), preferredBatchQueue(), scratchLocation(), allocationProjectNumber() {
+  ComputeResourcePreference() : computeResourceId(), overridebyAiravata(true), preferredJobSubmissionProtocol(( ::apache::airavata::model::appcatalog::computeresource::JobSubmissionProtocol::type)0), preferredDataMovementProtocol(( ::apache::airavata::model::appcatalog::computeresource::DataMovementProtocol::type)0), preferredBatchQueue(), scratchLocation(), allocationProjectNumber() {
   }
 
   virtual ~ComputeResourcePreference() throw() {}
 
   std::string computeResourceId;
   bool overridebyAiravata;
-  std::string preferredJobSubmissionProtocol;
-  std::string preferredDataMovementProtocol;
+   ::apache::airavata::model::appcatalog::computeresource::JobSubmissionProtocol::type preferredJobSubmissionProtocol;
+   ::apache::airavata::model::appcatalog::computeresource::DataMovementProtocol::type preferredDataMovementProtocol;
   std::string preferredBatchQueue;
   std::string scratchLocation;
   std::string allocationProjectNumber;
@@ -72,12 +73,12 @@ class ComputeResourcePreference {
     overridebyAiravata = val;
   }
 
-  void __set_preferredJobSubmissionProtocol(const std::string& val) {
+  void __set_preferredJobSubmissionProtocol(const  ::apache::airavata::model::appcatalog::computeresource::JobSubmissionProtocol::type val) {
     preferredJobSubmissionProtocol = val;
     __isset.preferredJobSubmissionProtocol = true;
   }
 
-  void __set_preferredDataMovementProtocol(const std::string& val) {
+  void __set_preferredDataMovementProtocol(const  ::apache::airavata::model::appcatalog::computeresource::DataMovementProtocol::type val) {
     preferredDataMovementProtocol = val;
     __isset.preferredDataMovementProtocol = true;
   }
@@ -147,8 +148,8 @@ typedef struct _GatewayResourceProfile__isset {
 class GatewayResourceProfile {
  public:
 
-  static const char* ascii_fingerprint; // = "D6477904C48AAB4DC8F09369D670B400";
-  static const uint8_t binary_fingerprint[16]; // = {0xD6,0x47,0x79,0x04,0xC4,0x8A,0xAB,0x4D,0xC8,0xF0,0x93,0x69,0xD6,0x70,0xB4,0x00};
+  static const char* ascii_fingerprint; // = "42DA2625493A482A59D0742432A025BD";
+  static const uint8_t binary_fingerprint[16]; // = {0x42,0xDA,0x26,0x25,0x49,0x3A,0x48,0x2A,0x59,0xD0,0x74,0x24,0x32,0xA0,0x25,0xBD};
 
   GatewayResourceProfile() : gatewayID("DO_NOT_SET_AT_CLIENTS"), gatewayName(), gatewayDescription() {
   }

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
index 68addd1..3d7b921 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/ComputeResource/Types.php
@@ -49,6 +49,15 @@ final class JobManagerCommand {
   );
 }
 
+final class MonitorMode {
+  const PUSH = 0;
+  const PULL = 1;
+  static public $__names = array(
+    0 => 'PUSH',
+    1 => 'PULL',
+  );
+}
+
 final class FileSystems {
   const HOME = 0;
   const WORK = 1;
@@ -128,6 +137,7 @@ class ResourceJobManager {
   public $pushMonitoringEndpoint = null;
   public $jobManagerBinPath = null;
   public $jobManagerCommands = null;
+  public $monitorMode = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -160,6 +170,10 @@ class ResourceJobManager {
             'type' => TType::STRING,
             ),
           ),
+        6 => array(
+          'var' => 'monitorMode',
+          'type' => TType::I32,
+          ),
         );
     }
     if (is_array($vals)) {
@@ -178,6 +192,9 @@ class ResourceJobManager {
       if (isset($vals['jobManagerCommands'])) {
         $this->jobManagerCommands = $vals['jobManagerCommands'];
       }
+      if (isset($vals['monitorMode'])) {
+        $this->monitorMode = $vals['monitorMode'];
+      }
     }
   }
 
@@ -248,6 +265,13 @@ class ResourceJobManager {
             $xfer += $input->skip($ftype);
           }
           break;
+        case 6:
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->monitorMode);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -299,6 +323,11 @@ class ResourceJobManager {
       }
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->monitorMode !== null) {
+      $xfer += $output->writeFieldBegin('monitorMode', TType::I32, 6);
+      $xfer += $output->writeI32($this->monitorMode);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/GatewayProfile/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/GatewayProfile/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/GatewayProfile/Types.php
index faf2b05..3e8db10 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/GatewayProfile/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/GatewayProfile/Types.php
@@ -41,11 +41,11 @@ class ComputeResourcePreference {
           ),
         3 => array(
           'var' => 'preferredJobSubmissionProtocol',
-          'type' => TType::STRING,
+          'type' => TType::I32,
           ),
         4 => array(
           'var' => 'preferredDataMovementProtocol',
-          'type' => TType::STRING,
+          'type' => TType::I32,
           ),
         5 => array(
           'var' => 'preferredBatchQueue',
@@ -120,15 +120,15 @@ class ComputeResourcePreference {
           }
           break;
         case 3:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->preferredJobSubmissionProtocol);
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->preferredJobSubmissionProtocol);
           } else {
             $xfer += $input->skip($ftype);
           }
           break;
         case 4:
-          if ($ftype == TType::STRING) {
-            $xfer += $input->readString($this->preferredDataMovementProtocol);
+          if ($ftype == TType::I32) {
+            $xfer += $input->readI32($this->preferredDataMovementProtocol);
           } else {
             $xfer += $input->skip($ftype);
           }
@@ -178,13 +178,13 @@ class ComputeResourcePreference {
       $xfer += $output->writeFieldEnd();
     }
     if ($this->preferredJobSubmissionProtocol !== null) {
-      $xfer += $output->writeFieldBegin('preferredJobSubmissionProtocol', TType::STRING, 3);
-      $xfer += $output->writeString($this->preferredJobSubmissionProtocol);
+      $xfer += $output->writeFieldBegin('preferredJobSubmissionProtocol', TType::I32, 3);
+      $xfer += $output->writeI32($this->preferredJobSubmissionProtocol);
       $xfer += $output->writeFieldEnd();
     }
     if ($this->preferredDataMovementProtocol !== null) {
-      $xfer += $output->writeFieldBegin('preferredDataMovementProtocol', TType::STRING, 4);
-      $xfer += $output->writeString($this->preferredDataMovementProtocol);
+      $xfer += $output->writeFieldBegin('preferredDataMovementProtocol', TType::I32, 4);
+      $xfer += $output->writeI32($this->preferredDataMovementProtocol);
       $xfer += $output->writeFieldEnd();
     }
     if ($this->preferredBatchQueue !== null) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
new file mode 100644
index 0000000..30528b7
--- /dev/null
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/MonitorMode.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.airavata.model.appcatalog.computeresource;
+
+
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
+/**
+ * Monitoring modes
+ * 
+ * PUSH:
+ * Server will push job status changes.
+ * 
+ * PULL:
+ * Need to pull and get the Job status changes.
+ * 
+ * 
+ */
+@SuppressWarnings("all") public enum MonitorMode implements org.apache.thrift.TEnum {
+  PUSH(0),
+  PULL(1);
+
+  private final int value;
+
+  private MonitorMode(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Get the integer value of this enum value, as defined in the Thrift IDL.
+   */
+  public int getValue() {
+    return value;
+  }
+
+  /**
+   * Find a the enum type by its integer value, as defined in the Thrift IDL.
+   * @return null if the value is not found.
+   */
+  public static MonitorMode findByValue(int value) { 
+    switch (value) {
+      case 0:
+        return PUSH;
+      case 1:
+        return PULL;
+      default:
+        return null;
+    }
+  }
+}

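The new file above defines the complete public surface of MonitorMode, so a minimal usage sketch can be grounded directly in it; the wrapper class name and the printed output below are illustrative only.

    import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;

    public class MonitorModeExample {
        public static void main(String[] args) {
            // Pick the pull-based monitoring mode defined in the IDL.
            MonitorMode mode = MonitorMode.PULL;

            // getValue() returns the integer declared in the Thrift IDL (PULL = 1).
            int wireValue = mode.getValue();

            // findByValue() is what the generated read() paths use to map an
            // incoming i32 back onto the enum; unknown values map to null.
            MonitorMode decoded = MonitorMode.findByValue(wireValue);
            System.out.println(decoded + " <-> " + wireValue);

            // An out-of-range value yields null rather than throwing.
            System.out.println(MonitorMode.findByValue(42));
        }
    }
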
http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
index 680a40a..d0487b1 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/computeresource/ResourceJobManager.java
@@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory;
   private static final org.apache.thrift.protocol.TField PUSH_MONITORING_ENDPOINT_FIELD_DESC = new org.apache.thrift.protocol.TField("pushMonitoringEndpoint", org.apache.thrift.protocol.TType.STRING, (short)3);
   private static final org.apache.thrift.protocol.TField JOB_MANAGER_BIN_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("jobManagerBinPath", org.apache.thrift.protocol.TType.STRING, (short)4);
   private static final org.apache.thrift.protocol.TField JOB_MANAGER_COMMANDS_FIELD_DESC = new org.apache.thrift.protocol.TField("jobManagerCommands", org.apache.thrift.protocol.TType.MAP, (short)5);
+  private static final org.apache.thrift.protocol.TField MONITOR_MODE_FIELD_DESC = new org.apache.thrift.protocol.TField("monitorMode", org.apache.thrift.protocol.TType.I32, (short)6);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -86,6 +87,7 @@ import org.slf4j.LoggerFactory;
   private String pushMonitoringEndpoint; // optional
   private String jobManagerBinPath; // optional
   private Map<JobManagerCommand,String> jobManagerCommands; // optional
+  private MonitorMode monitorMode; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -97,7 +99,12 @@ import org.slf4j.LoggerFactory;
     RESOURCE_JOB_MANAGER_TYPE((short)2, "resourceJobManagerType"),
     PUSH_MONITORING_ENDPOINT((short)3, "pushMonitoringEndpoint"),
     JOB_MANAGER_BIN_PATH((short)4, "jobManagerBinPath"),
-    JOB_MANAGER_COMMANDS((short)5, "jobManagerCommands");
+    JOB_MANAGER_COMMANDS((short)5, "jobManagerCommands"),
+    /**
+     * 
+     * @see MonitorMode
+     */
+    MONITOR_MODE((short)6, "monitorMode");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -122,6 +129,8 @@ import org.slf4j.LoggerFactory;
           return JOB_MANAGER_BIN_PATH;
         case 5: // JOB_MANAGER_COMMANDS
           return JOB_MANAGER_COMMANDS;
+        case 6: // MONITOR_MODE
+          return MONITOR_MODE;
         default:
           return null;
       }
@@ -162,7 +171,7 @@ import org.slf4j.LoggerFactory;
   }
 
   // isset id assignments
-  private _Fields optionals[] = {_Fields.PUSH_MONITORING_ENDPOINT,_Fields.JOB_MANAGER_BIN_PATH,_Fields.JOB_MANAGER_COMMANDS};
+  private _Fields optionals[] = {_Fields.PUSH_MONITORING_ENDPOINT,_Fields.JOB_MANAGER_BIN_PATH,_Fields.JOB_MANAGER_COMMANDS,_Fields.MONITOR_MODE};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -178,6 +187,8 @@ import org.slf4j.LoggerFactory;
         new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
             new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, JobManagerCommand.class), 
             new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    tmpMap.put(_Fields.MONITOR_MODE, new org.apache.thrift.meta_data.FieldMetaData("monitorMode", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, MonitorMode.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ResourceJobManager.class, metaDataMap);
   }
@@ -227,6 +238,9 @@ import org.slf4j.LoggerFactory;
       }
       this.jobManagerCommands = __this__jobManagerCommands;
     }
+    if (other.isSetMonitorMode()) {
+      this.monitorMode = other.monitorMode;
+    }
   }
 
   public ResourceJobManager deepCopy() {
@@ -241,6 +255,7 @@ import org.slf4j.LoggerFactory;
     this.pushMonitoringEndpoint = null;
     this.jobManagerBinPath = null;
     this.jobManagerCommands = null;
+    this.monitorMode = null;
   }
 
   public String getResourceJobManagerId() {
@@ -377,6 +392,37 @@ import org.slf4j.LoggerFactory;
     }
   }
 
+  /**
+   * 
+   * @see MonitorMode
+   */
+  public MonitorMode getMonitorMode() {
+    return this.monitorMode;
+  }
+
+  /**
+   * 
+   * @see MonitorMode
+   */
+  public void setMonitorMode(MonitorMode monitorMode) {
+    this.monitorMode = monitorMode;
+  }
+
+  public void unsetMonitorMode() {
+    this.monitorMode = null;
+  }
+
+  /** Returns true if field monitorMode is set (has been assigned a value) and false otherwise */
+  public boolean isSetMonitorMode() {
+    return this.monitorMode != null;
+  }
+
+  public void setMonitorModeIsSet(boolean value) {
+    if (!value) {
+      this.monitorMode = null;
+    }
+  }
+
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case RESOURCE_JOB_MANAGER_ID:
@@ -419,6 +465,14 @@ import org.slf4j.LoggerFactory;
       }
       break;
 
+    case MONITOR_MODE:
+      if (value == null) {
+        unsetMonitorMode();
+      } else {
+        setMonitorMode((MonitorMode)value);
+      }
+      break;
+
     }
   }
 
@@ -439,6 +493,9 @@ import org.slf4j.LoggerFactory;
     case JOB_MANAGER_COMMANDS:
       return getJobManagerCommands();
 
+    case MONITOR_MODE:
+      return getMonitorMode();
+
     }
     throw new IllegalStateException();
   }
@@ -460,6 +517,8 @@ import org.slf4j.LoggerFactory;
       return isSetJobManagerBinPath();
     case JOB_MANAGER_COMMANDS:
       return isSetJobManagerCommands();
+    case MONITOR_MODE:
+      return isSetMonitorMode();
     }
     throw new IllegalStateException();
   }
@@ -522,6 +581,15 @@ import org.slf4j.LoggerFactory;
         return false;
     }
 
+    boolean this_present_monitorMode = true && this.isSetMonitorMode();
+    boolean that_present_monitorMode = true && that.isSetMonitorMode();
+    if (this_present_monitorMode || that_present_monitorMode) {
+      if (!(this_present_monitorMode && that_present_monitorMode))
+        return false;
+      if (!this.monitorMode.equals(that.monitorMode))
+        return false;
+    }
+
     return true;
   }
 
@@ -588,6 +656,16 @@ import org.slf4j.LoggerFactory;
         return lastComparison;
       }
     }
+    lastComparison = Boolean.valueOf(isSetMonitorMode()).compareTo(other.isSetMonitorMode());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMonitorMode()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.monitorMode, other.monitorMode);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     return 0;
   }
 
@@ -653,6 +731,16 @@ import org.slf4j.LoggerFactory;
       }
       first = false;
     }
+    if (isSetMonitorMode()) {
+      if (!first) sb.append(", ");
+      sb.append("monitorMode:");
+      if (this.monitorMode == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.monitorMode);
+      }
+      first = false;
+    }
     sb.append(")");
     return sb.toString();
   }
@@ -756,6 +844,14 @@ import org.slf4j.LoggerFactory;
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
+          case 6: // MONITOR_MODE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
+              struct.setMonitorModeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -808,6 +904,13 @@ import org.slf4j.LoggerFactory;
           oprot.writeFieldEnd();
         }
       }
+      if (struct.monitorMode != null) {
+        if (struct.isSetMonitorMode()) {
+          oprot.writeFieldBegin(MONITOR_MODE_FIELD_DESC);
+          oprot.writeI32(struct.monitorMode.getValue());
+          oprot.writeFieldEnd();
+        }
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -837,7 +940,10 @@ import org.slf4j.LoggerFactory;
       if (struct.isSetJobManagerCommands()) {
         optionals.set(2);
       }
-      oprot.writeBitSet(optionals, 3);
+      if (struct.isSetMonitorMode()) {
+        optionals.set(3);
+      }
+      oprot.writeBitSet(optionals, 4);
       if (struct.isSetPushMonitoringEndpoint()) {
         oprot.writeString(struct.pushMonitoringEndpoint);
       }
@@ -854,6 +960,9 @@ import org.slf4j.LoggerFactory;
           }
         }
       }
+      if (struct.isSetMonitorMode()) {
+        oprot.writeI32(struct.monitorMode.getValue());
+      }
     }
 
     @Override
@@ -863,7 +972,7 @@ import org.slf4j.LoggerFactory;
       struct.setResourceJobManagerIdIsSet(true);
       struct.resourceJobManagerType = ResourceJobManagerType.findByValue(iprot.readI32());
       struct.setResourceJobManagerTypeIsSet(true);
-      BitSet incoming = iprot.readBitSet(3);
+      BitSet incoming = iprot.readBitSet(4);
       if (incoming.get(0)) {
         struct.pushMonitoringEndpoint = iprot.readString();
         struct.setPushMonitoringEndpointIsSet(true);
@@ -887,6 +996,10 @@ import org.slf4j.LoggerFactory;
         }
         struct.setJobManagerCommandsIsSet(true);
       }
+      if (incoming.get(3)) {
+        struct.monitorMode = MonitorMode.findByValue(iprot.readI32());
+        struct.setMonitorModeIsSet(true);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/gatewayprofile/ComputeResourcePreference.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/gatewayprofile/ComputeResourcePreference.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/gatewayprofile/ComputeResourcePreference.java
index d1e7649..26bd817 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/gatewayprofile/ComputeResourcePreference.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/gatewayprofile/ComputeResourcePreference.java
@@ -81,8 +81,8 @@ import org.slf4j.LoggerFactory;
 
   private static final org.apache.thrift.protocol.TField COMPUTE_RESOURCE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("computeResourceId", org.apache.thrift.protocol.TType.STRING, (short)1);
   private static final org.apache.thrift.protocol.TField OVERRIDEBY_AIRAVATA_FIELD_DESC = new org.apache.thrift.protocol.TField("overridebyAiravata", org.apache.thrift.protocol.TType.BOOL, (short)2);
-  private static final org.apache.thrift.protocol.TField PREFERRED_JOB_SUBMISSION_PROTOCOL_FIELD_DESC = new org.apache.thrift.protocol.TField("preferredJobSubmissionProtocol", org.apache.thrift.protocol.TType.STRING, (short)3);
-  private static final org.apache.thrift.protocol.TField PREFERRED_DATA_MOVEMENT_PROTOCOL_FIELD_DESC = new org.apache.thrift.protocol.TField("preferredDataMovementProtocol", org.apache.thrift.protocol.TType.STRING, (short)4);
+  private static final org.apache.thrift.protocol.TField PREFERRED_JOB_SUBMISSION_PROTOCOL_FIELD_DESC = new org.apache.thrift.protocol.TField("preferredJobSubmissionProtocol", org.apache.thrift.protocol.TType.I32, (short)3);
+  private static final org.apache.thrift.protocol.TField PREFERRED_DATA_MOVEMENT_PROTOCOL_FIELD_DESC = new org.apache.thrift.protocol.TField("preferredDataMovementProtocol", org.apache.thrift.protocol.TType.I32, (short)4);
   private static final org.apache.thrift.protocol.TField PREFERRED_BATCH_QUEUE_FIELD_DESC = new org.apache.thrift.protocol.TField("preferredBatchQueue", org.apache.thrift.protocol.TType.STRING, (short)5);
   private static final org.apache.thrift.protocol.TField SCRATCH_LOCATION_FIELD_DESC = new org.apache.thrift.protocol.TField("scratchLocation", org.apache.thrift.protocol.TType.STRING, (short)6);
   private static final org.apache.thrift.protocol.TField ALLOCATION_PROJECT_NUMBER_FIELD_DESC = new org.apache.thrift.protocol.TField("allocationProjectNumber", org.apache.thrift.protocol.TType.STRING, (short)7);
@@ -95,8 +95,8 @@ import org.slf4j.LoggerFactory;
 
   private String computeResourceId; // required
   private boolean overridebyAiravata; // required
-  private String preferredJobSubmissionProtocol; // optional
-  private String preferredDataMovementProtocol; // optional
+  private org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol preferredJobSubmissionProtocol; // optional
+  private org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol preferredDataMovementProtocol; // optional
   private String preferredBatchQueue; // optional
   private String scratchLocation; // optional
   private String allocationProjectNumber; // optional
@@ -105,7 +105,15 @@ import org.slf4j.LoggerFactory;
   @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
     COMPUTE_RESOURCE_ID((short)1, "computeResourceId"),
     OVERRIDEBY_AIRAVATA((short)2, "overridebyAiravata"),
+    /**
+     * 
+     * @see org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol
+     */
     PREFERRED_JOB_SUBMISSION_PROTOCOL((short)3, "preferredJobSubmissionProtocol"),
+    /**
+     * 
+     * @see org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol
+     */
     PREFERRED_DATA_MOVEMENT_PROTOCOL((short)4, "preferredDataMovementProtocol"),
     PREFERRED_BATCH_QUEUE((short)5, "preferredBatchQueue"),
     SCRATCH_LOCATION((short)6, "scratchLocation"),
@@ -189,9 +197,9 @@ import org.slf4j.LoggerFactory;
     tmpMap.put(_Fields.OVERRIDEBY_AIRAVATA, new org.apache.thrift.meta_data.FieldMetaData("overridebyAiravata", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
     tmpMap.put(_Fields.PREFERRED_JOB_SUBMISSION_PROTOCOL, new org.apache.thrift.meta_data.FieldMetaData("preferredJobSubmissionProtocol", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol.class)));
     tmpMap.put(_Fields.PREFERRED_DATA_MOVEMENT_PROTOCOL, new org.apache.thrift.meta_data.FieldMetaData("preferredDataMovementProtocol", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol.class)));
     tmpMap.put(_Fields.PREFERRED_BATCH_QUEUE, new org.apache.thrift.meta_data.FieldMetaData("preferredBatchQueue", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.SCRATCH_LOCATION, new org.apache.thrift.meta_data.FieldMetaData("scratchLocation", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
@@ -304,11 +312,19 @@ import org.slf4j.LoggerFactory;
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __OVERRIDEBYAIRAVATA_ISSET_ID, value);
   }
 
-  public String getPreferredJobSubmissionProtocol() {
+  /**
+   * 
+   * @see org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol
+   */
+  public org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol getPreferredJobSubmissionProtocol() {
     return this.preferredJobSubmissionProtocol;
   }
 
-  public void setPreferredJobSubmissionProtocol(String preferredJobSubmissionProtocol) {
+  /**
+   * 
+   * @see org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol
+   */
+  public void setPreferredJobSubmissionProtocol(org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol preferredJobSubmissionProtocol) {
     this.preferredJobSubmissionProtocol = preferredJobSubmissionProtocol;
   }
 
@@ -327,11 +343,19 @@ import org.slf4j.LoggerFactory;
     }
   }
 
-  public String getPreferredDataMovementProtocol() {
+  /**
+   * 
+   * @see org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol
+   */
+  public org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol getPreferredDataMovementProtocol() {
     return this.preferredDataMovementProtocol;
   }
 
-  public void setPreferredDataMovementProtocol(String preferredDataMovementProtocol) {
+  /**
+   * 
+   * @see org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol
+   */
+  public void setPreferredDataMovementProtocol(org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol preferredDataMovementProtocol) {
     this.preferredDataMovementProtocol = preferredDataMovementProtocol;
   }
 
@@ -441,7 +465,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetPreferredJobSubmissionProtocol();
       } else {
-        setPreferredJobSubmissionProtocol((String)value);
+        setPreferredJobSubmissionProtocol((org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol)value);
       }
       break;
 
@@ -449,7 +473,7 @@ import org.slf4j.LoggerFactory;
       if (value == null) {
         unsetPreferredDataMovementProtocol();
       } else {
-        setPreferredDataMovementProtocol((String)value);
+        setPreferredDataMovementProtocol((org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol)value);
       }
       break;
 
@@ -845,16 +869,16 @@ import org.slf4j.LoggerFactory;
             }
             break;
           case 3: // PREFERRED_JOB_SUBMISSION_PROTOCOL
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.preferredJobSubmissionProtocol = iprot.readString();
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.preferredJobSubmissionProtocol = org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol.findByValue(iprot.readI32());
               struct.setPreferredJobSubmissionProtocolIsSet(true);
             } else { 
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
           case 4: // PREFERRED_DATA_MOVEMENT_PROTOCOL
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.preferredDataMovementProtocol = iprot.readString();
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.preferredDataMovementProtocol = org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol.findByValue(iprot.readI32());
               struct.setPreferredDataMovementProtocolIsSet(true);
             } else { 
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
@@ -908,14 +932,14 @@ import org.slf4j.LoggerFactory;
       if (struct.preferredJobSubmissionProtocol != null) {
         if (struct.isSetPreferredJobSubmissionProtocol()) {
           oprot.writeFieldBegin(PREFERRED_JOB_SUBMISSION_PROTOCOL_FIELD_DESC);
-          oprot.writeString(struct.preferredJobSubmissionProtocol);
+          oprot.writeI32(struct.preferredJobSubmissionProtocol.getValue());
           oprot.writeFieldEnd();
         }
       }
       if (struct.preferredDataMovementProtocol != null) {
         if (struct.isSetPreferredDataMovementProtocol()) {
           oprot.writeFieldBegin(PREFERRED_DATA_MOVEMENT_PROTOCOL_FIELD_DESC);
-          oprot.writeString(struct.preferredDataMovementProtocol);
+          oprot.writeI32(struct.preferredDataMovementProtocol.getValue());
           oprot.writeFieldEnd();
         }
       }
@@ -977,10 +1001,10 @@ import org.slf4j.LoggerFactory;
       }
       oprot.writeBitSet(optionals, 5);
       if (struct.isSetPreferredJobSubmissionProtocol()) {
-        oprot.writeString(struct.preferredJobSubmissionProtocol);
+        oprot.writeI32(struct.preferredJobSubmissionProtocol.getValue());
       }
       if (struct.isSetPreferredDataMovementProtocol()) {
-        oprot.writeString(struct.preferredDataMovementProtocol);
+        oprot.writeI32(struct.preferredDataMovementProtocol.getValue());
       }
       if (struct.isSetPreferredBatchQueue()) {
         oprot.writeString(struct.preferredBatchQueue);
@@ -1002,11 +1026,11 @@ import org.slf4j.LoggerFactory;
       struct.setOverridebyAiravataIsSet(true);
       BitSet incoming = iprot.readBitSet(5);
       if (incoming.get(0)) {
-        struct.preferredJobSubmissionProtocol = iprot.readString();
+        struct.preferredJobSubmissionProtocol = org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol.findByValue(iprot.readI32());
         struct.setPreferredJobSubmissionProtocolIsSet(true);
       }
       if (incoming.get(1)) {
-        struct.preferredDataMovementProtocol = iprot.readString();
+        struct.preferredDataMovementProtocol = org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol.findByValue(iprot.readI32());
         struct.setPreferredDataMovementProtocolIsSet(true);
       }
       if (incoming.get(2)) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
index 62ebfe5..80a70df 100644
--- a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
@@ -83,6 +83,21 @@ enum JobManagerCommand {
 }
 
 /**
+* Monitoring modes
+*
+* PUSH:
+* Server will push job status changes.
+*
+* PULL:
+* Need to pull and get the Job status changes.
+*
+**/
+enum MonitorMode {
+   PUSH,
+   PULL
+}
+
+/**
  * Resource Job Manager Information
  *
  * resourceJobManagerType:
@@ -104,7 +119,8 @@ struct ResourceJobManager {
     2: required ResourceJobManagerType resourceJobManagerType,
     3: optional string pushMonitoringEndpoint,
     4: optional string jobManagerBinPath,
-    5: optional map<JobManagerCommand, string> jobManagerCommands
+    5: optional map<JobManagerCommand, string> jobManagerCommands,
+    6: optional MonitorMode monitorMode
 }
 
 /**

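The IDL change above is what produces the generated ResourceJobManager code shown earlier in this commit. As a hedged sketch (not part of the commit), the new optional field might be exercised from Java as follows; the required id/type fields are omitted for brevity.

    import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
    import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;

    public class ResourceJobManagerMonitorModeExample {
        public static void main(String[] args) {
            ResourceJobManager rjm = new ResourceJobManager();

            // Opt in to pull-based monitoring via the new optional IDL field 6.
            rjm.setMonitorMode(MonitorMode.PULL);

            // isSetMonitorMode() reflects the optional-field semantics added above;
            // unset optional fields are simply skipped by the generated write() code.
            if (rjm.isSetMonitorMode()) {
                System.out.println("monitor mode: " + rjm.getMonitorMode());
            }
        }
    }
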
http://git-wip-us.apache.org/repos/asf/airavata/blob/96a673f0/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift b/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
index fb856b3..3839890 100644
--- a/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
@@ -21,6 +21,7 @@
 namespace java org.apache.airavata.model.appcatalog.gatewayprofile
 namespace php Airavata.Model.AppCatalog.GatewayProfile
 namespace cpp apache.airavata.model.appcatalog.gatewayprofile
+include "computeResourceModel.thrift"
 
 const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 
@@ -54,8 +55,8 @@ const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 struct ComputeResourcePreference {
     1: required string computeResourceId,
     2: required bool overridebyAiravata = 1,
-    3: optional string preferredJobSubmissionProtocol,
-    4: optional string preferredDataMovementProtocol,
+    3: optional computeResourceModel.JobSubmissionProtocol preferredJobSubmissionProtocol,
+    4: optional computeResourceModel.DataMovementProtocol preferredDataMovementProtocol,
     5: optional string preferredBatchQueue,
     6: optional string scratchLocation,
     7: optional string allocationProjectNumber

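For Java callers, the net effect of this IDL change is that the two protocol setters on ComputeResourcePreference now take the computeresource enums instead of free-form strings, as the generated code earlier in this commit shows. A hedged sketch follows; the SSH and SCP constants and the resource id are illustrative assumptions, not values taken from this diff.

    import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
    import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
    import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;

    public class ComputeResourcePreferenceExample {
        public static void main(String[] args) {
            ComputeResourcePreference pref = new ComputeResourcePreference();
            pref.setComputeResourceId("example-resource-id"); // placeholder id

            // These setters previously accepted strings; after this change they
            // take the enum types, so arbitrary protocol strings can no longer
            // reach the registry. SSH and SCP are assumed enum constants, used
            // purely for illustration.
            pref.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
            pref.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);

            System.out.println(pref);
        }
    }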

[46/50] [abbrv] airavata git commit: fixing scheduler class to get correct execution mode

Posted by ch...@apache.org.
fixing scheduler class to get correct execution mode


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/225b6746
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/225b6746
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/225b6746

Branch: refs/heads/master
Commit: 225b6746f40d66d6335b590038bf273cb5ed5c7b
Parents: 62f0036
Author: chathuriw <ka...@gmail.com>
Authored: Fri Nov 7 14:53:09 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Fri Nov 7 14:53:09 2014 -0500

----------------------------------------------------------------------
 .../src/main/java/org/apache/airavata/gfac/Scheduler.java   | 9 +++++++++
 .../org/apache/airavata/gfac/monitor/HostMonitorData.java   | 8 ++++++++
 2 files changed, 17 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/225b6746/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 0dae029..fb5899d 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -182,6 +182,7 @@ public class Scheduler {
         DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
         DocumentBuilder docBuilder = null;
         Document handlerDoc = null;
+        String jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
         try {
             docBuilder = docBuilderFactory.newDocumentBuilder();
             handlerDoc = docBuilder.parse(new File(resource.getPath()));
@@ -203,6 +204,14 @@ public class Scheduler {
                 String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
                 executionMode = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
             }
+
+            if (executionMode == null || "".equals(executionMode)) {
+                List<Element> elements = GFacUtils.getElementList(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_SUBMISSION + jobSubmissionProtocol + "']");
+                for (Element element : elements) {
+                    executionMode = element.getAttribute(Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
+                }
+            }
+
         } catch (XPathExpressionException e) {
             log.error("Error evaluating XPath expression");  //To change body of catch statement use File | Settings | File Templates.
             throw new GFacException("Error evaluating XPath expression", e);
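
Restated outside the Scheduler, the resolution order after this fix is: the provider entry matched on the host class first, then, if that yields nothing, the provider entry matched on the job submission protocol. A simplified sketch using the same GFac helpers as the diff (the package locations of GFacConfiguration, Constants and GFacUtils are assumed from the gfac-core module, and a broad throws clause is used to keep the sketch short):

import java.util.List;
import org.apache.airavata.gfac.Constants;
import org.apache.airavata.gfac.GFacConfiguration;
import org.apache.airavata.gfac.core.utils.GFacUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class ExecutionModeLookupSketch {
    // Resolution order used by the fixed Scheduler: host class first, then job submission protocol.
    public static String resolve(Document handlerDoc, String hostClass, String jobSubmissionProtocol)
            throws Exception {
        String executionMode = GFacConfiguration.getAttributeValue(handlerDoc,
                Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']",
                Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
        if (executionMode == null || "".equals(executionMode)) {
            List<Element> elements = GFacUtils.getElementList(handlerDoc,
                    Constants.XPATH_EXPR_PROVIDER_ON_SUBMISSION + jobSubmissionProtocol + "']");
            for (Element element : elements) {
                executionMode = element.getAttribute(Constants.GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE);
            }
        }
        return executionMode;
    }
}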

http://git-wip-us.apache.org/repos/asf/airavata/blob/225b6746/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
index c2017a0..f29e3e6 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/HostMonitorData.java
@@ -27,6 +27,7 @@ import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescr
 import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 
+import java.util.ArrayList;
 import java.util.List;
 
 public class HostMonitorData {
@@ -41,7 +42,14 @@ public class HostMonitorData {
         this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
         this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
         this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
+        this.monitorIDs = new ArrayList<MonitorID>();
+    }
 
+    public HostMonitorData(JobExecutionContext jobExecutionContext, List<MonitorID> monitorIDs) {
+        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
+        this.jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+        this.dataMovementProtocol = jobExecutionContext.getPreferredDataMovementProtocol();
+        this.monitorIDs = monitorIDs;
     }
 
     public ComputeResourceDescription getComputeResourceDescription() {
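
The new overload lets callers seed the monitor data with an existing MonitorID list instead of starting from the empty list created by the original constructor. A minimal usage sketch (the MonitorID class is assumed to live in org.apache.airavata.gfac.monitor alongside HostMonitorData):

import java.util.ArrayList;
import java.util.List;
import org.apache.airavata.gfac.core.context.JobExecutionContext;
import org.apache.airavata.gfac.monitor.HostMonitorData;
import org.apache.airavata.gfac.monitor.MonitorID;

public class HostMonitorDataSketch {
    // Picks the appropriate constructor depending on whether MonitorIDs are already known.
    public static HostMonitorData forJob(JobExecutionContext jobExecutionContext,
                                         List<MonitorID> knownIds) {
        if (knownIds == null || knownIds.isEmpty()) {
            return new HostMonitorData(jobExecutionContext); // starts with an empty MonitorID list
        }
        return new HostMonitorData(jobExecutionContext, new ArrayList<MonitorID>(knownIds)); // seeded list
    }
}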


[36/50] [abbrv] airavata git commit: Added STDOUT and STDERR types to DataType enum

Posted by ch...@apache.org.
Added STDOUT and STDERR types to DataType enum


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/428b2407
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/428b2407
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/428b2407

Branch: refs/heads/master
Commit: 428b24075b7ac82109fbd5ab20df314b549da96e
Parents: 0b95d3b
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 16:57:51 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 16:57:51 2014 -0500

----------------------------------------------------------------------
 .../lib/airavata/applicationInterfaceModel_types.cpp      | 10 +++++++---
 .../lib/airavata/applicationInterfaceModel_types.h        |  4 +++-
 .../lib/Airavata/Model/AppCatalog/AppInterface/Types.php  |  4 ++++
 .../airavata/model/appcatalog/appinterface/DataType.java  |  8 +++++++-
 .../applicationInterfaceModel.thrift                      |  4 +++-
 5 files changed, 24 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/428b2407/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.cpp
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.cpp b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.cpp
index 4a171a7..a324d13 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.cpp
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.cpp
@@ -31,15 +31,19 @@ int _kDataTypeValues[] = {
   DataType::STRING,
   DataType::INTEGER,
   DataType::FLOAT,
-  DataType::URI
+  DataType::URI,
+  DataType::STDOUT,
+  DataType::STDERR
 };
 const char* _kDataTypeNames[] = {
   "STRING",
   "INTEGER",
   "FLOAT",
-  "URI"
+  "URI",
+  "STDOUT",
+  "STDERR"
 };
-const std::map<int, const char*> _DataType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(4, _kDataTypeValues, _kDataTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _DataType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(6, _kDataTypeValues, _kDataTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
 const char* InputDataObjectType::ascii_fingerprint = "24F962C1CE4BE9FBD0F5D5EE9D1D5C00";
 const uint8_t InputDataObjectType::binary_fingerprint[16] = {0x24,0xF9,0x62,0xC1,0xCE,0x4B,0xE9,0xFB,0xD0,0xF5,0xD5,0xEE,0x9D,0x1D,0x5C,0x00};

http://git-wip-us.apache.org/repos/asf/airavata/blob/428b2407/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.h
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.h b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.h
index a3c6d86..fbbd7cb 100644
--- a/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.h
+++ b/airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/applicationInterfaceModel_types.h
@@ -39,7 +39,9 @@ struct DataType {
     STRING = 0,
     INTEGER = 1,
     FLOAT = 2,
-    URI = 3
+    URI = 3,
+    STDOUT = 4,
+    STDERR = 5
   };
 };
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/428b2407/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/AppInterface/Types.php
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/AppInterface/Types.php b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/AppInterface/Types.php
index 07b5c9e..e0cf00b 100644
--- a/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/AppInterface/Types.php
+++ b/airavata-api/airavata-client-sdks/airavata-php-sdk/src/main/resources/lib/Airavata/Model/AppCatalog/AppInterface/Types.php
@@ -22,11 +22,15 @@ final class DataType {
   const INTEGER = 1;
   const FLOAT = 2;
   const URI = 3;
+  const STDOUT = 4;
+  const STDERR = 5;
   static public $__names = array(
     0 => 'STRING',
     1 => 'INTEGER',
     2 => 'FLOAT',
     3 => 'URI',
+    4 => 'STDOUT',
+    5 => 'STDERR',
   );
 }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/428b2407/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/appinterface/DataType.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/appinterface/DataType.java b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/appinterface/DataType.java
index b3771ad..dc14aac 100644
--- a/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/appinterface/DataType.java
+++ b/airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/appcatalog/appinterface/DataType.java
@@ -36,7 +36,9 @@ import org.apache.thrift.TEnum;
   STRING(0),
   INTEGER(1),
   FLOAT(2),
-  URI(3);
+  URI(3),
+  STDOUT(4),
+  STDERR(5);
 
   private final int value;
 
@@ -65,6 +67,10 @@ import org.apache.thrift.TEnum;
         return FLOAT;
       case 3:
         return URI;
+      case 4:
+        return STDOUT;
+      case 5:
+        return STDERR;
       default:
         return null;
     }

http://git-wip-us.apache.org/repos/asf/airavata/blob/428b2407/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift b/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
index 7a33078..5dcb809 100644
--- a/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
@@ -38,7 +38,9 @@ enum DataType{
 	STRING,
 	INTEGER,
 	FLOAT,
-	URI
+	URI,
+	STDOUT,
+	STDERR
 }
 
 /**
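
With the regenerated models, the two new values can be used wherever a DataType is accepted; for example, stdout and stderr can be declared as first-class application outputs. A minimal sketch using the appinterface model classes:

import java.util.ArrayList;
import java.util.List;
import org.apache.airavata.model.appcatalog.appinterface.DataType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

public class StdStreamOutputsSketch {
    // Declares stdout and stderr as application outputs using the new enum values.
    public static List<OutputDataObjectType> standardStreamOutputs() {
        OutputDataObjectType stdout = new OutputDataObjectType();
        stdout.setName("stdout");
        stdout.setType(DataType.STDOUT);
        stdout.setValue("");

        OutputDataObjectType stderr = new OutputDataObjectType();
        stderr.setName("stderr");
        stderr.setType(DataType.STDERR);
        stderr.setValue("");

        List<OutputDataObjectType> outputs = new ArrayList<OutputDataObjectType>();
        outputs.add(stdout);
        outputs.add(stderr);
        return outputs;
    }
}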


[37/50] [abbrv] airavata git commit: Merge remote-tracking branch 'origin/gfac_appcatalog_int' into gfac_appcatalog_int

Posted by ch...@apache.org.
Merge remote-tracking branch 'origin/gfac_appcatalog_int' into gfac_appcatalog_int


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/088e80f0
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/088e80f0
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/088e80f0

Branch: refs/heads/master
Commit: 088e80f035372fd25f4f07ea6f06b2ec230cce28
Parents: 428b240 f9fdf53
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 17:00:06 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 17:00:06 2014 -0500

----------------------------------------------------------------------
 .../model/util/ExperimentModelUtil.java         |  15 +--
 .../registry/jpa/impl/ExperimentRegistry.java   | 126 ++++++++++---------
 .../registry/jpa/impl/RegistryImpl.java         |  13 +-
 .../jpa/utils/ThriftDataModelConversion.java    | 101 ++++++++-------
 4 files changed, 135 insertions(+), 120 deletions(-)
----------------------------------------------------------------------



[29/50] [abbrv] airavata git commit: fixing NPE

Posted by ch...@apache.org.
fixing NPE


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/38bc5929
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/38bc5929
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/38bc5929

Branch: refs/heads/master
Commit: 38bc592927b2fbee8c36ec9dbed800235fdd0c11
Parents: d8176e8
Author: chathuriw <ka...@gmail.com>
Authored: Thu Nov 6 11:48:36 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Thu Nov 6 11:48:36 2014 -0500

----------------------------------------------------------------------
 .../data/impl/GwyResourceProfileImpl.java        | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/38bc5929/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
index 101b647..827c560 100644
--- a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
+++ b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
@@ -66,8 +66,14 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(profileResource.getGatewayID());
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
+                    if (preference.getPreferredJobSubmissionProtocol() != null){
+                        resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    }
+
+                    if (preference.getPreferredDataMovementProtocol() != null){
+                        resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
+                    }
+
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());
@@ -100,8 +106,13 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(gatewayId);
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
+                    if (preference.getPreferredJobSubmissionProtocol() != null){
+                        resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    }
+
+                    if (preference.getPreferredDataMovementProtocol() != null){
+                        resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
+                    }
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());
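
The same guard is needed anywhere an optional Thrift enum field is converted to a string before persisting. A small illustrative helper (not part of the codebase) that captures the pattern:

public final class EnumNames {
    private EnumNames() {
    }

    // Returns the enum's string form, or null when the optional field was never set.
    public static String nameOrNull(Enum<?> value) {
        return value == null ? null : value.toString();
    }
}

With such a helper, each guarded assignment above collapses to a single line, e.g. resource.setPreferredJobProtocol(EnumNames.nameOrNull(preference.getPreferredJobSubmissionProtocol())), and likewise for the data movement protocol.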


[39/50] [abbrv] airavata git commit: Gfac - Removed ActualParameter objects and replaced them with Input and Output DataObjectTypes

Posted by ch...@apache.org.
Gfac - Removed ActualParameter objects and replaced them with Input and Output DataObjectTypes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/b52499eb
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/b52499eb
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/b52499eb

Branch: refs/heads/master
Commit: b52499eb72b438718f32d1b6653c40d4de8ced9f
Parents: 088e80f
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 17:39:59 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 17:39:59 2014 -0500

----------------------------------------------------------------------
 .../client/samples/CreateLaunchExperiment.java  |  231 ++--
 .../samples/CreateLaunchExperimentUS3.java      |  139 +--
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |   10 +-
 .../handler/AbstractRecoverableHandler.java     |    4 +-
 .../airavata/gfac/core/utils/GFacUtils.java     | 1017 +++++++++---------
 .../airavata/gfac/core/utils/OutputUtils.java   |   72 +-
 .../gfac/gram/handler/GridFTPOutputHandler.java |    3 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java |    8 +-
 .../gsissh/handler/GSISSHOutputHandler.java     |   71 +-
 .../gfac/local/provider/impl/LocalProvider.java |    6 +-
 .../ssh/handler/AdvancedSCPOutputHandler.java   |   13 +-
 .../gfac/ssh/handler/SSHOutputHandler.java      |   64 +-
 12 files changed, 831 insertions(+), 807 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index daaf4cc..d6c0f80 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -25,6 +25,9 @@ import org.apache.airavata.api.Airavata;
 import org.apache.airavata.api.client.AiravataClientFactory;
 import org.apache.airavata.client.tools.RegisterSampleApplications;
 import org.apache.airavata.client.tools.RegisterSampleApplicationsUtils;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
@@ -152,16 +155,16 @@ public class CreateLaunchExperiment {
     
     public static String createEchoExperimentForTrestles(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Input_to_Echo");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Input_to_Echo");
             input.setType(DataType.STRING);
             input.setValue("Echoed_Output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("echo_output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("echo_output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -204,16 +207,16 @@ public class CreateLaunchExperiment {
     
     public static String createEchoExperimentForFSD(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Input_to_Echo");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Input_to_Echo");
             input.setType(DataType.STRING);
             input.setValue("Echoed_Output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("echo_output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("echo_output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -261,19 +264,19 @@ public class CreateLaunchExperiment {
     
     public static String createExperimentWRFStampede(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Config_Namelist_File");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Config_Namelist_File");
             input.setType(DataType.URI);
             input.setValue("/Users/lahirugunathilake/Downloads/wrf_sample_inputs/namelist.input");
 
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("WRF_Initial_Conditions");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("WRF_Initial_Conditions");
             input1.setType(DataType.URI);
             input1.setValue("/Users/lahirugunathilake/Downloads/wrf_sample_inputs/wrfinput_d01");
 
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("WRF_Boundary_File");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("WRF_Boundary_File");
             input2.setType(DataType.URI);
             input2.setValue("/Users/lahirugunathilake/Downloads/wrf_sample_inputs/wrfbdy_d01");
 
@@ -282,14 +285,14 @@ public class CreateLaunchExperiment {
             exInputs.add(input2);
 
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("WRF_Output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("WRF_Output");
             output.setType(DataType.URI);
             output.setValue("");
 
-            DataObjectType output1 = new DataObjectType();
-            output1.setKey("WRF_Execution_Log");
+            OutputDataObjectType output1 = new OutputDataObjectType();
+            output1.setName("WRF_Execution_Log");
             output1.setType(DataType.URI);
             output1.setValue("");
 
@@ -335,19 +338,19 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentWRFTrestles(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("WRF_Namelist");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("WRF_Namelist");
             input.setType(DataType.URI);
             input.setValue("/Users/chathuri/Downloads/wrf_sample_inputs/namelist.input");
 
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("WRF_Input_File");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("WRF_Input_File");
             input1.setType(DataType.URI);
             input1.setValue("/Users/chathuri/Downloads/wrf_sample_inputs/wrfinput_d01");
 
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("WRF_Boundary_File");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("WRF_Boundary_File");
             input2.setType(DataType.URI);
             input2.setValue("/Users/chathuri/Downloads/wrf_sample_inputs/wrfbdy_d01");
 
@@ -356,14 +359,14 @@ public class CreateLaunchExperiment {
             exInputs.add(input2);
 
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("WRF_Output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("WRF_Output");
             output.setType(DataType.URI);
             output.setValue("");
 
-            DataObjectType output1 = new DataObjectType();
-            output1.setKey("WRF_Execution_Log");
+            OutputDataObjectType output1 = new OutputDataObjectType();
+            output1.setName("WRF_Execution_Log");
             output1.setType(DataType.URI);
             output1.setValue("");
 
@@ -431,16 +434,16 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentEchoForLocalHost(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Input_to_Echo");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Input_to_Echo");
             input.setType(DataType.STRING);
             input.setValue("Echoed_Output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("Echoed_Output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("Echoed_Output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -485,16 +488,16 @@ public class CreateLaunchExperiment {
 
 //    public static String createExperimentForSSHHost(Airavata.Client client) throws TException {
 //        try {
-//            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-//            DataObjectType input = new DataObjectType();
-//            input.setKey("echo_input");
+//            List<OutputDataObjectType> exInputs = new ArrayList<OutputDataObjectType>();
+//            OutputDataObjectType input = new OutputDataObjectType();
+//            input.setName("echo_input");
 //            input.setType(DataType.STRING);
 //            input.setValue("echo_output=Hello World");
 //            exInputs.add(input);
 //
-//            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-//            DataObjectType output = new DataObjectType();
-//            output.setKey("echo_output");
+//            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+//            OutputDataObjectType output = new OutputDataObjectType();
+//            output.setName("echo_output");
 //            output.setType(DataType.STRING);
 //            output.setValue("");
 //            exOut.add(output);
@@ -531,16 +534,16 @@ public class CreateLaunchExperiment {
 
     public static String createEchoExperimentForStampede(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Input_to_Echo");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Input_to_Echo");
             input.setType(DataType.STRING);
             input.setValue("Echoed_Output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("Echoed_Output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("Echoed_Output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -585,16 +588,16 @@ public class CreateLaunchExperiment {
 
 //    public static String createEchoExperimentForLonestar(Airavata.Client client) throws TException {
 //        try {
-//            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-//            DataObjectType input = new DataObjectType();
-//            input.setKey("echo_input");
+//            List<OutputDataObjectType> exInputs = new ArrayList<OutputDataObjectType>();
+//            OutputDataObjectType input = new OutputDataObjectType();
+//            input.setName("echo_input");
 //            input.setType(DataType.STRING);
 //            input.setValue("echo_output=Hello World");
 //            exInputs.add(input);
 //
-//            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-//            DataObjectType output = new DataObjectType();
-//            output.setKey("echo_output");
+//            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+//            OutputDataObjectType output = new OutputDataObjectType();
+//            output.setName("echo_output");
 //            output.setType(DataType.STRING);
 //            output.setValue("");
 //            exOut.add(output);
@@ -655,16 +658,16 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentForBR2(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Input_to_Echo");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Input_to_Echo");
             input.setType(DataType.STRING);
             input.setValue("Echoed_Output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("Echoed_Output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("Echoed_Output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -709,44 +712,44 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentForBR2Amber(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Heat_Restart_File");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Heat_Restart_File");
             input.setType(DataType.URI);
             input.setValue("/Users/lahirugunathilake/Downloads/02_Heat.rst");
             exInputs.add(input);
 
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("Production_Control_File");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("Production_Control_File");
             input1.setType(DataType.URI);
             input1.setValue("/Users/lahirugunathilake/Downloads/03_Prod.in");
             exInputs.add(input1);
 
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("Parameter_Topology_File");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("Parameter_Topology_File");
             input2.setType(DataType.URI);
             input2.setValue("/Users/lahirugunathilake/Downloads/prmtop");
             exInputs.add(input2);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("AMBER_Execution_Summary");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("AMBER_Execution_Summary");
             output.setType(DataType.URI);
             output.setValue("");
             exOut.add(output);
 
-            DataObjectType output1 = new DataObjectType();
-            output1.setKey("AMBER_Execution_log");
+            OutputDataObjectType output1 = new OutputDataObjectType();
+            output1.setName("AMBER_Execution_log");
             output1.setType(DataType.URI);
             output1.setValue("");
             exOut.add(output1);
-            DataObjectType output2 = new DataObjectType();
-            output2.setKey("AMBER_Trajectory_file");
+            OutputDataObjectType output2 = new OutputDataObjectType();
+            output2.setName("AMBER_Trajectory_file");
             output2.setType(DataType.URI);
             output2.setValue("");
             exOut.add(output2);
-            DataObjectType output3 = new DataObjectType();
-            output3.setKey("AMBER_Restart_file");
+            OutputDataObjectType output3 = new OutputDataObjectType();
+            output3.setName("AMBER_Restart_file");
             output3.setType(DataType.URI);
             output3.setValue("");
             exOut.add(output3);
@@ -792,44 +795,44 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentForStampedeAmber(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Heat_Restart_File");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Heat_Restart_File");
             input.setType(DataType.URI);
             input.setValue("/Users/lahirugunathilake/Downloads/02_Heat.rst");
             exInputs.add(input);
 
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("Production_Control_File");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("Production_Control_File");
             input1.setType(DataType.URI);
             input1.setValue("/Users/lahirugunathilake/Downloads/03_Prod.in");
             exInputs.add(input1);
 
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("Parameter_Topology_File");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("Parameter_Topology_File");
             input2.setType(DataType.URI);
             input2.setValue("/Users/lahirugunathilake/Downloads/prmtop");
             exInputs.add(input2);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("AMBER_Execution_Summary");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("AMBER_Execution_Summary");
             output.setType(DataType.URI);
             output.setValue("");
             exOut.add(output);
 
-            DataObjectType output1 = new DataObjectType();
-            output1.setKey("AMBER_Execution_Summary");
+            OutputDataObjectType output1 = new OutputDataObjectType();
+            output1.setName("AMBER_Execution_Summary");
             output1.setType(DataType.URI);
             output1.setValue("");
             exOut.add(output1);
-            DataObjectType output2 = new DataObjectType();
-            output2.setKey("AMBER_Trajectory_file");
+            OutputDataObjectType output2 = new OutputDataObjectType();
+            output2.setName("AMBER_Trajectory_file");
             output2.setType(DataType.URI);
             output2.setValue("");
             exOut.add(output2);
-            DataObjectType output3 = new DataObjectType();
-            output3.setKey("AMBER_Restart_file");
+            OutputDataObjectType output3 = new OutputDataObjectType();
+            output3.setName("AMBER_Restart_file");
             output3.setType(DataType.URI);
             output3.setValue("");
             exOut.add(output3);
@@ -874,44 +877,44 @@ public class CreateLaunchExperiment {
 
     public static String createExperimentForTrestlesAmber(Airavata.Client client) throws TException {
         try {
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("Heat_Restart_File");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("Heat_Restart_File");
             input.setType(DataType.URI);
             input.setValue("/Users/lahirugunathilake/Downloads/02_Heat.rst");
             exInputs.add(input);
 
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("Production_Control_File");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("Production_Control_File");
             input1.setType(DataType.URI);
             input1.setValue("/Users/lahirugunathilake/Downloads/03_Prod.in");
             exInputs.add(input1);
 
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("Production_Control_File");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("Production_Control_File");
             input2.setType(DataType.URI);
             input2.setValue("/Users/lahirugunathilake/Downloads/prmtop");
             exInputs.add(input2);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("AMBER_Execution_Summary");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("AMBER_Execution_Summary");
             output.setType(DataType.URI);
             output.setValue("");
             exOut.add(output);
 
-            DataObjectType output1 = new DataObjectType();
-            output1.setKey("AMBER_Execution_log");
+            OutputDataObjectType output1 = new OutputDataObjectType();
+            output1.setName("AMBER_Execution_log");
             output1.setType(DataType.URI);
             output1.setValue("");
             exOut.add(output1);
-            DataObjectType output2 = new DataObjectType();
-            output2.setKey("AMBER_Trajectory_file");
+            OutputDataObjectType output2 = new OutputDataObjectType();
+            output2.setName("AMBER_Trajectory_file");
             output2.setType(DataType.URI);
             output2.setValue("");
             exOut.add(output2);
-            DataObjectType output3 = new DataObjectType();
-            output3.setKey("AMBER_Restart_file");
+            OutputDataObjectType output3 = new OutputDataObjectType();
+            output3.setName("AMBER_Restart_file");
             output3.setType(DataType.URI);
             output3.setValue("");
             exOut.add(output3);
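
The migration applied throughout these samples is mechanical: DataObjectType with setKey(...) becomes InputDataObjectType or OutputDataObjectType with setName(...), while setType and setValue stay the same. A condensed sketch of the new form for the echo case:

import java.util.ArrayList;
import java.util.List;
import org.apache.airavata.model.appcatalog.appinterface.DataType;
import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

public class EchoExperimentData {
    public static List<InputDataObjectType> echoInputs() {
        InputDataObjectType input = new InputDataObjectType();
        input.setName("Input_to_Echo");          // was setKey(...) on DataObjectType
        input.setType(DataType.STRING);
        input.setValue("Echoed_Output=Hello World");
        List<InputDataObjectType> inputs = new ArrayList<InputDataObjectType>();
        inputs.add(input);
        return inputs;
    }

    public static List<OutputDataObjectType> echoOutputs() {
        OutputDataObjectType output = new OutputDataObjectType();
        output.setName("Echoed_Output");         // was setKey(...) on DataObjectType
        output.setType(DataType.STRING);
        output.setValue("");
        List<OutputDataObjectType> outputs = new ArrayList<OutputDataObjectType>();
        outputs.add(output);
        return outputs;
    }
}
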

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
index bd9d959..b4f8d98 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperimentUS3.java
@@ -21,6 +21,9 @@ package org.apache.airavata.client.samples;
 
 import org.apache.airavata.api.Airavata;
 import org.apache.airavata.api.client.AiravataClientFactory;
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.error.AiravataClientException;
 import org.apache.airavata.model.error.AiravataSystemException;
 import org.apache.airavata.model.error.ExperimentNotFoundException;
@@ -140,16 +143,16 @@ public class CreateLaunchExperimentUS3 {
 
     public static String createExperimentForTrestles(Airavata.Client client) throws TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("echo_input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("echo_input");
             input.setType(DataType.STRING);
             input.setValue("echo_output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("echo_output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("echo_output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);
@@ -186,17 +189,17 @@ public class CreateLaunchExperimentUS3 {
     
     public static String createUS3ExperimentForTrestles (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("input");
             input.setType(DataType.URI);
             input.setValue("file:///home/airavata/input/hpcinput.tar");
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("walltime");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("walltime");
             input1.setType(DataType.STRING);
             input1.setValue("-walltime=60");
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("mgroupcount");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("mgroupcount");
             input2.setType(DataType.STRING);
             input2.setValue("-mgroupcount=1");
             
@@ -204,17 +207,17 @@ public class CreateLaunchExperimentUS3 {
             exInputs.add(input1);
             exInputs.add(input2);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("output");
             output.setType(DataType.URI);
             output.setValue("");
-//            DataObjectType output1 = new DataObjectType();
-//            output1.setKey("stdout");
+//            OutputDataObjectType output1 = new OutputDataObjectType();
+//            output1.setName("stdout");
 //            output1.setType(DataType.STDOUT);
 //            output1.setValue("");
-//            DataObjectType output2 = new DataObjectType();
-//            output2.setKey("stderr");
+//            OutputDataObjectType output2 = new OutputDataObjectType();
+//            output2.setName("stderr");
 //            output2.setType(DataType.STDERR);
 //            output2.setValue("");
             exOut.add(output);
@@ -257,17 +260,17 @@ public class CreateLaunchExperimentUS3 {
     }
     public static String createUS3ExperimentForStampede (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("input");
             input.setType(DataType.URI);
             input.setValue("file:///home/airavata/input/hpcinput.tar");
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("walltime");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("walltime");
             input1.setType(DataType.STRING);
             input1.setValue("-walltime=60");
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("mgroupcount");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("mgroupcount");
             input2.setType(DataType.STRING);
             input2.setValue("-mgroupcount=1");
             
@@ -276,17 +279,17 @@ public class CreateLaunchExperimentUS3 {
             exInputs.add(input2);
 
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("output");
             output.setType(DataType.URI);
             output.setValue("");
-//            DataObjectType output1 = new DataObjectType();
-//            output1.setKey("stdout");
+//            OutputDataObjectType output1 = new OutputDataObjectType();
+//            output1.setName("stdout");
 //            output1.setType(DataType.STDOUT);
 //            output1.setValue("");
-//            DataObjectType output2 = new DataObjectType();
-//            output2.setKey("stderr");
+//            OutputDataObjectType output2 = new OutputDataObjectType();
+//            output2.setName("stderr");
 //            output2.setType(DataType.STDERR);
 //            output2.setValue("");
             exOut.add(output);
@@ -330,17 +333,17 @@ public class CreateLaunchExperimentUS3 {
     }
     public static String createUS3ExperimentForLonestar (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("input");
             input.setType(DataType.URI);
             input.setValue("file:///home/airavata/input/hpcinput.tar");
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("walltime");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("walltime");
             input1.setType(DataType.STRING);
             input1.setValue("-walltime=60");
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("mgroupcount");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("mgroupcount");
             input2.setType(DataType.STRING);
             input2.setValue("-mgroupcount=1");
             
@@ -348,17 +351,17 @@ public class CreateLaunchExperimentUS3 {
             exInputs.add(input1);
             exInputs.add(input2);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("output");
             output.setType(DataType.URI);
             output.setValue("");
-//            DataObjectType output1 = new DataObjectType();
-//            output1.setKey("stdout");
+//            OutputDataObjectType output1 = new OutputDataObjectType();
+//            output1.setName("stdout");
 //            output1.setType(DataType.STDOUT);
 //            output1.setValue("");
-//            DataObjectType output2 = new DataObjectType();
-//            output2.setKey("stderr");
+//            OutputDataObjectType output2 = new OutputDataObjectType();
+//            output2.setName("stderr");
 //            output2.setType(DataType.STDERR);
 //            output2.setValue("");
             exOut.add(output);
@@ -402,17 +405,17 @@ public class CreateLaunchExperimentUS3 {
     }
     public static String createUS3ExperimentForAlamo (Airavata.Client client) throws AiravataSystemException, InvalidRequestException, AiravataClientException, TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("input");
             input.setType(DataType.URI);
             input.setValue("file:///home/airavata/input/hpcinput.tar");
-            DataObjectType input1 = new DataObjectType();
-            input1.setKey("walltime");
+            InputDataObjectType input1 = new InputDataObjectType();
+            input1.setName("walltime");
             input1.setType(DataType.STRING);
             input1.setValue("-walltime=60");
-            DataObjectType input2 = new DataObjectType();
-            input2.setKey("mgroupcount");
+            InputDataObjectType input2 = new InputDataObjectType();
+            input2.setName("mgroupcount");
             input2.setType(DataType.STRING);
             input2.setValue("-mgroupcount=1");
             
@@ -421,17 +424,17 @@ public class CreateLaunchExperimentUS3 {
             exInputs.add(input2);
 
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("output");
             output.setType(DataType.URI);
             output.setValue("");
-//            DataObjectType output1 = new DataObjectType();
-//            output1.setKey("stdout");
+//            OutputDataObjectType output1 = new OutputDataObjectType();
+//            output1.setName("stdout");
 //            output1.setType(DataType.STDOUT);
 //            output1.setValue("");
-//            DataObjectType output2 = new DataObjectType();
-//            output2.setKey("stderr");
+//            OutputDataObjectType output2 = new OutputDataObjectType();
+//            output2.setName("stderr");
 //            output2.setType(DataType.STDERR);
 //            output2.setValue("");
             exOut.add(output);
@@ -475,16 +478,16 @@ public class CreateLaunchExperimentUS3 {
     }
     public static String createExperimentForStampede(Airavata.Client client) throws TException  {
         try{
-            List<DataObjectType> exInputs = new ArrayList<DataObjectType>();
-            DataObjectType input = new DataObjectType();
-            input.setKey("echo_input");
+            List<InputDataObjectType> exInputs = new ArrayList<InputDataObjectType>();
+            InputDataObjectType input = new InputDataObjectType();
+            input.setName("echo_input");
             input.setType(DataType.STRING);
             input.setValue("echo_output=Hello World");
             exInputs.add(input);
 
-            List<DataObjectType> exOut = new ArrayList<DataObjectType>();
-            DataObjectType output = new DataObjectType();
-            output.setKey("echo_output");
+            List<OutputDataObjectType> exOut = new ArrayList<OutputDataObjectType>();
+            OutputDataObjectType output = new OutputDataObjectType();
+            output.setName("echo_output");
             output.setType(DataType.STRING);
             output.setValue("");
             exOut.add(output);

http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index d063dac..cbf32f7 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -51,6 +51,8 @@ import org.apache.airavata.messaging.core.Publisher;
 import org.apache.airavata.messaging.core.PublisherFactory;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
 import org.apache.airavata.model.appcatalog.computeresource.FileSystems;
@@ -295,11 +297,11 @@ public class BetterGfacImpl implements GFac,Watcher {
         applicationContext.setComputeResourcePreference(gatewayResourcePreferences);
         jobExecutionContext.setApplicationContext(applicationContext);
 
-        List<DataObjectType> experimentInputs = taskData.getApplicationInputs();
-        jobExecutionContext.setInMessageContext(new MessageContext(GFacUtils.getInMessageContext(experimentInputs)));
+        List<InputDataObjectType> experimentInputs = taskData.getApplicationInputs();
+        jobExecutionContext.setInMessageContext(new MessageContext(GFacUtils.getInputParamMap(experimentInputs)));
 
-        List<DataObjectType> outputData = taskData.getApplicationOutputs();
-        jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOutMessageContext(outputData)));
+        List<OutputDataObjectType> outputData = taskData.getApplicationOutputs();
+        jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOuputParamMap(outputData)));
 
         jobExecutionContext.setProperty(Constants.PROP_TOPIC, experimentID);
         jobExecutionContext.setGfac(this);

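GFacUtils.getInputParamMap and GFacUtils.getOuputParamMap are referenced above but their bodies are not part of this hunk; presumably they turn the typed input/output lists into the name-keyed maps that MessageContext expects. A hedged sketch of that conversion, assuming parameters are keyed by getName() and the map values are the data objects themselves (the class, method, and map types below are illustrative, not the actual GFacUtils code):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;

    public final class InputParamMapSketch {
        // Illustrative stand-in for a helper like GFacUtils.getInputParamMap(...)
        public static Map<String, Object> toInputParamMap(List<InputDataObjectType> inputs) {
            Map<String, Object> params = new HashMap<String, Object>();
            if (inputs != null) {
                for (InputDataObjectType input : inputs) {
                    // Keys now come from getName() instead of the old DataObjectType.getKey()
                    params.put(input.getName(), input);
                }
            }
            return params;
        }
    }
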
http://git-wip-us.apache.org/repos/asf/airavata/blob/b52499eb/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AbstractRecoverableHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AbstractRecoverableHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AbstractRecoverableHandler.java
index 1ae19f3..566a6ba 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AbstractRecoverableHandler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AbstractRecoverableHandler.java
@@ -25,9 +25,9 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.cpi.BetterGfacImpl;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.messaging.event.TaskIdentifier;
 import org.apache.airavata.model.messaging.event.TaskOutputChangeEvent;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.persistance.registry.jpa.impl.RegistryFactory;
 import org.apache.airavata.registry.cpi.Registry;
 import org.apache.airavata.registry.cpi.RegistryException;
@@ -78,7 +78,7 @@ public abstract class AbstractRecoverableHandler implements GFacRecoverableHandl
         this.registry = registry;
     }
 
-    protected void fireTaskOutputChangeEvent(JobExecutionContext jobExecutionContext, List<DataObjectType> outputArray) {
+    protected void fireTaskOutputChangeEvent(JobExecutionContext jobExecutionContext, List<OutputDataObjectType> outputArray) {
         TaskIdentifier taskIdentity = new TaskIdentifier(jobExecutionContext.getTaskData().getTaskID(),
                 jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
                 jobExecutionContext.getExperimentID(),

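With the signature change above, concrete handlers hand the event a List<OutputDataObjectType>. A hedged fragment of what a subclass call site could look like; the subclass and method names are illustrative, and only fireTaskOutputChangeEvent comes from the code above:

    import java.util.List;

    import org.apache.airavata.gfac.core.context.JobExecutionContext;
    import org.apache.airavata.gfac.core.handler.AbstractRecoverableHandler;
    import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

    public abstract class OutputPublishingHandlerSketch extends AbstractRecoverableHandler {
        protected void publishOutputs(JobExecutionContext jobExecutionContext,
                                      List<OutputDataObjectType> outputs) {
            // outputs now carry app-catalog OutputDataObjectType instances
            fireTaskOutputChangeEvent(jobExecutionContext, outputs);
        }
    }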

[35/50] [abbrv] airavata git commit: updating with app catalog input output objects

Posted by ch...@apache.org.
updating with app catalog input output objects


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/f9fdf53e
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/f9fdf53e
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/f9fdf53e

Branch: refs/heads/master
Commit: f9fdf53e7400d98f0985c8d66b492c97ab07b5bc
Parents: 0b95d3b
Author: chathuriw <ka...@gmail.com>
Authored: Thu Nov 6 16:46:52 2014 -0500
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Thu Nov 6 16:46:52 2014 -0500

----------------------------------------------------------------------
 .../model/util/ExperimentModelUtil.java         |  15 +--
 .../registry/jpa/impl/ExperimentRegistry.java   | 126 ++++++++++---------
 .../registry/jpa/impl/RegistryImpl.java         |  13 +-
 .../jpa/utils/ThriftDataModelConversion.java    | 101 ++++++++-------
 4 files changed, 135 insertions(+), 120 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/f9fdf53e/airavata-api/airavata-model-utils/src/main/java/org/apache/airavata/model/util/ExperimentModelUtil.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-model-utils/src/main/java/org/apache/airavata/model/util/ExperimentModelUtil.java b/airavata-api/airavata-model-utils/src/main/java/org/apache/airavata/model/util/ExperimentModelUtil.java
index 43fb241..fdfd617 100644
--- a/airavata-api/airavata-model-utils/src/main/java/org/apache/airavata/model/util/ExperimentModelUtil.java
+++ b/airavata-api/airavata-model-utils/src/main/java/org/apache/airavata/model/util/ExperimentModelUtil.java
@@ -25,10 +25,11 @@ package org.apache.airavata.model.util;
 import java.util.Calendar;
 import java.util.List;
 
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.AdvancedInputDataHandling;
 import org.apache.airavata.model.workspace.experiment.AdvancedOutputDataHandling;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.QualityOfServiceParams;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
@@ -52,7 +53,7 @@ public class ExperimentModelUtil {
                                                     String experimentName,
                                                     String expDescription,
                                                     String applicationId,
-                                                    List<DataObjectType> experimentInputList) {
+                                                    List<InputDataObjectType> experimentInputList) {
         Experiment experiment = new Experiment();
         experiment.setProjectID(projectID);
         experiment.setUserName(userName);
@@ -124,12 +125,12 @@ public class ExperimentModelUtil {
         taskDetails.setCreationTime(experiment.getCreationTime());
         taskDetails.setApplicationId(experiment.getApplicationId());
         taskDetails.setApplicationVersion(experiment.getApplicationVersion());
-        List<DataObjectType> experimentInputs = experiment.getExperimentInputs();
+        List<InputDataObjectType> experimentInputs = experiment.getExperimentInputs();
         if (experimentInputs != null){
             taskDetails.setApplicationInputs(experimentInputs);
         }
 
-        List<DataObjectType> experimentOutputs = experiment.getExperimentOutputs();
+        List<OutputDataObjectType> experimentOutputs = experiment.getExperimentOutputs();
         if (experimentOutputs != null){
             taskDetails.setApplicationOutputs(experimentOutputs);
         }
@@ -158,12 +159,12 @@ public class ExperimentModelUtil {
 //        String[] split = ;
         taskDetails.setApplicationId(nodeDetails.getExecutionUnitData());
 //        taskDetails.setApplicationVersion(split[1]);
-        List<DataObjectType> experimentInputs = nodeDetails.getNodeInputs();
+        List<InputDataObjectType> experimentInputs = nodeDetails.getNodeInputs();
         if (experimentInputs != null){
             taskDetails.setApplicationInputs(experimentInputs);
         }
 
-        List<DataObjectType> experimentOutputs = nodeDetails.getNodeOutputs();
+        List<OutputDataObjectType> experimentOutputs = nodeDetails.getNodeOutputs();
         if (experimentOutputs != null){
             taskDetails.setApplicationOutputs(experimentOutputs);
         }
@@ -186,7 +187,7 @@ public class ExperimentModelUtil {
         return taskDetails;
     }
     public static WorkflowNodeDetails createWorkflowNode (String nodeName,
-                                                          List<DataObjectType> nodeInputs){
+                                                          List<InputDataObjectType> nodeInputs){
         WorkflowNodeDetails wfnod = new WorkflowNodeDetails();
         wfnod.setNodeName(nodeName);
         wfnod.setNodeInputs(nodeInputs);

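A short usage sketch for the updated createWorkflowNode signature, assuming nodeInputs is a List<InputDataObjectType> assembled as in the client samples earlier in this thread (the node name is a placeholder):

    // nodeInputs: List<InputDataObjectType> built with setName()/setType()/setValue()
    WorkflowNodeDetails node = ExperimentModelUtil.createWorkflowNode("echo_node", nodeInputs);
    // The returned node already carries the inputs via setNodeInputs(...)
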
http://git-wip-us.apache.org/repos/asf/airavata/blob/f9fdf53e/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
index 03ca6fc..a83f5f1 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/ExperimentRegistry.java
@@ -24,6 +24,8 @@ package org.apache.airavata.persistance.registry.jpa.impl;
 import org.apache.airavata.common.logger.AiravataLogger;
 import org.apache.airavata.common.logger.AiravataLoggerFactory;
 import org.apache.airavata.common.utils.AiravataUtils;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.persistance.registry.jpa.Resource;
 import org.apache.airavata.persistance.registry.jpa.ResourceType;
@@ -81,7 +83,7 @@ public class ExperimentRegistry {
             experimentResource.setWorkflowTemplateVersion(experiment.getWorkflowTemplateVersion());
             experimentResource.setWorkflowExecutionId(experiment.getWorkflowExecutionInstanceId());
             experimentResource.save();
-            List<DataObjectType> experimentInputs = experiment.getExperimentInputs();
+            List<InputDataObjectType> experimentInputs = experiment.getExperimentInputs();
             if (experimentInputs != null) {
                 addExpInputs(experimentInputs, experimentResource);
             }
@@ -91,9 +93,9 @@ public class ExperimentRegistry {
                 addUserConfigData(userConfigurationData, experimentID);
             }
 
-            List<DataObjectType> experimentOutputs = experiment.getExperimentOutputs();
+            List<OutputDataObjectType> experimentOutputs = experiment.getExperimentOutputs();
             if (experimentOutputs != null && !experimentOutputs.isEmpty()){
-                for (DataObjectType output : experimentOutputs){
+                for (OutputDataObjectType output : experimentOutputs){
                     output.setValue("");
                 }
                 addExpOutputs(experimentOutputs, experimentID);
@@ -260,12 +262,12 @@ public class ExperimentRegistry {
 
     }
 
-    public void addExpInputs(List<DataObjectType> exInputs, ExperimentResource experimentResource) throws RegistryException {
+    public void addExpInputs(List<InputDataObjectType> exInputs, ExperimentResource experimentResource) throws RegistryException {
         try {
-            for (DataObjectType input : exInputs) {
+            for (InputDataObjectType input : exInputs) {
                 ExperimentInputResource resource = (ExperimentInputResource) experimentResource.create(ResourceType.EXPERIMENT_INPUT);
                 resource.setExperimentResource(experimentResource);
-                resource.setExperimentKey(input.getKey());
+                resource.setExperimentKey(input.getName());
                 resource.setValue(input.getValue());
                 if (input.getType() != null){
                     resource.setInputType(input.getType().toString());
@@ -279,12 +281,12 @@ public class ExperimentRegistry {
         }
     }
 
-    public void updateExpInputs(List<DataObjectType> exInputs, ExperimentResource experimentResource) throws RegistryException {
+    public void updateExpInputs(List<InputDataObjectType> exInputs, ExperimentResource experimentResource) throws RegistryException {
         try {
             List<ExperimentInputResource> experimentInputs = experimentResource.getExperimentInputs();
-            for (DataObjectType input : exInputs) {
+            for (InputDataObjectType input : exInputs) {
                 for (ExperimentInputResource exinput : experimentInputs) {
-                    if (exinput.getExperimentKey().equals(input.getKey())) {
+                    if (exinput.getExperimentKey().equals(input.getName())) {
                         exinput.setValue(input.getValue());
                         if (input.getType() != null){
                             exinput.setInputType(input.getType().toString());
@@ -301,18 +303,18 @@ public class ExperimentRegistry {
 
     }
 
-    public String addExpOutputs(List<DataObjectType> exOutput, String expId) throws RegistryException {
+    public String addExpOutputs(List<OutputDataObjectType> exOutput, String expId) throws RegistryException {
         try {
             ExperimentResource experiment = gatewayResource.getExperiment(expId);
-            for (DataObjectType output : exOutput) {
+            for (OutputDataObjectType output : exOutput) {
                 ExperimentOutputResource resource = (ExperimentOutputResource) experiment.create(ResourceType.EXPERIMENT_OUTPUT);
                 resource.setExperimentResource(experiment);
-                resource.setExperimentKey(output.getKey());
+                resource.setExperimentKey(output.getName());
                 resource.setValue(output.getValue());
                 if (output.getType() != null){
                     resource.setOutputType(output.getType().toString());
                 }
-                resource.setMetadata(output.getMetaData());
+//                resource.setMetadata(output.get());
                 resource.save();
             }
         } catch (Exception e) {
@@ -322,20 +324,20 @@ public class ExperimentRegistry {
         return expId;
     }
 
-    public void updateExpOutputs(List<DataObjectType> exOutput, String expId) throws RegistryException {
+    public void updateExpOutputs(List<OutputDataObjectType> exOutput, String expId) throws RegistryException {
         try {
             ExperimentResource experiment = gatewayResource.getExperiment(expId);
             List<ExperimentOutputResource> existingExpOutputs = experiment.getExperimentOutputs();
-            for (DataObjectType output : exOutput) {
+            for (OutputDataObjectType output : exOutput) {
                 for (ExperimentOutputResource resource : existingExpOutputs) {
-                    if (resource.getExperimentKey().equals(output.getKey())) {
+                    if (resource.getExperimentKey().equals(output.getName())) {
                         resource.setExperimentResource(experiment);
-                        resource.setExperimentKey(output.getKey());
+                        resource.setExperimentKey(output.getName());
                         resource.setValue(output.getValue());
                         if (output.getType() != null){
                             resource.setOutputType(output.getType().toString());
                         }
-                        resource.setMetadata(output.getMetaData());
+//                        resource.setMetadata(output.getMetaData());
                         resource.save();
                     }
                 }
@@ -346,19 +348,19 @@ public class ExperimentRegistry {
         }
     }
 
-    public String addNodeOutputs(List<DataObjectType> wfOutputs, CompositeIdentifier ids) throws RegistryException {
+    public String addNodeOutputs(List<OutputDataObjectType> wfOutputs, CompositeIdentifier ids) throws RegistryException {
         try {
             ExperimentResource experiment = gatewayResource.getExperiment((String) ids.getTopLevelIdentifier());
             WorkflowNodeDetailResource workflowNode = experiment.getWorkflowNode((String) ids.getSecondLevelIdentifier());
-            for (DataObjectType output : wfOutputs) {
+            for (OutputDataObjectType output : wfOutputs) {
                 NodeOutputResource resource = (NodeOutputResource) workflowNode.create(ResourceType.NODE_OUTPUT);
                 resource.setNodeDetailResource(workflowNode);
-                resource.setOutputKey(output.getKey());
+                resource.setOutputKey(output.getName());
                 resource.setValue(output.getValue());
                 if (output.getType() != null){
                     resource.setOutputType(output.getType().toString());
                 }
-                resource.setMetadata(output.getMetaData());
+//                resource.setMetadata(output.getMetaData());
                 resource.save();
             }
         } catch (Exception e) {
@@ -368,20 +370,20 @@ public class ExperimentRegistry {
         return (String) ids.getSecondLevelIdentifier();
     }
 
-    public void updateNodeOutputs(List<DataObjectType> wfOutputs, String nodeId) throws RegistryException {
+    public void updateNodeOutputs(List<OutputDataObjectType> wfOutputs, String nodeId) throws RegistryException {
         try {
             ExperimentResource experiment = (ExperimentResource) gatewayResource.create(ResourceType.EXPERIMENT);
             WorkflowNodeDetailResource workflowNode = experiment.getWorkflowNode(nodeId);
             List<NodeOutputResource> nodeOutputs = workflowNode.getNodeOutputs();
-            for (DataObjectType output : wfOutputs) {
+            for (OutputDataObjectType output : wfOutputs) {
                 for (NodeOutputResource resource : nodeOutputs) {
                     resource.setNodeDetailResource(workflowNode);
-                    resource.setOutputKey(output.getKey());
+                    resource.setOutputKey(output.getName());
                     resource.setValue(output.getValue());
                     if (output.getType() != null){
                         resource.setOutputType(output.getType().toString());
                     }
-                    resource.setMetadata(output.getMetaData());
+//                    resource.setMetadata(output.getMetaData());
                     resource.save();
                 }
             }
@@ -391,20 +393,20 @@ public class ExperimentRegistry {
         }
     }
 
-    public String addApplicationOutputs(List<DataObjectType> appOutputs, CompositeIdentifier ids) throws RegistryException {
+    public String addApplicationOutputs(List<OutputDataObjectType> appOutputs, CompositeIdentifier ids) throws RegistryException {
         try {
             ExperimentResource experiment = (ExperimentResource) gatewayResource.create(ResourceType.EXPERIMENT);
             WorkflowNodeDetailResource workflowNode = experiment.getWorkflowNode((String) ids.getTopLevelIdentifier());
             TaskDetailResource taskDetail = workflowNode.getTaskDetail((String) ids.getSecondLevelIdentifier());
-            for (DataObjectType output : appOutputs) {
+            for (OutputDataObjectType output : appOutputs) {
                 ApplicationOutputResource resource = (ApplicationOutputResource) taskDetail.create(ResourceType.APPLICATION_OUTPUT);
                 resource.setTaskDetailResource(taskDetail);
-                resource.setOutputKey(output.getKey());
+                resource.setOutputKey(output.getName());
                 resource.setValue(output.getValue());
                 if (output.getType() != null){
                     resource.setOutputType(output.getType().toString());
                 }
-                resource.setMetadata(output.getMetaData());
+//                resource.setMetadata(output.getMetaData());
                 resource.save();
             }
         } catch (Exception e) {
@@ -704,11 +706,11 @@ public class ExperimentRegistry {
             resource.setNodeInstanceId(getNodeInstanceID(nodeDetails.getNodeName()));
             resource.save();
             String nodeId = resource.getNodeInstanceId();
-            List<DataObjectType> nodeInputs = nodeDetails.getNodeInputs();
+            List<InputDataObjectType> nodeInputs = nodeDetails.getNodeInputs();
             if (nodeInputs != null) {
                 addWorkflowInputs(nodeDetails.getNodeInputs(), resource);
             }
-            List<DataObjectType> nodeOutputs = nodeDetails.getNodeOutputs();
+            List<OutputDataObjectType> nodeOutputs = nodeDetails.getNodeOutputs();
             if (nodeOutputs != null && !nodeOutputs.isEmpty()){
                 CompositeIdentifier ids = new CompositeIdentifier(expId, nodeId);
                 addNodeOutputs(nodeOutputs, ids);
@@ -761,11 +763,11 @@ public class ExperimentRegistry {
             workflowNode.setNodeInstanceId(nodeId);
             workflowNode.save();
             String expID = workflowNode.getExperimentResource().getExpID();
-            List<DataObjectType> nodeInputs = nodeDetails.getNodeInputs();
+            List<InputDataObjectType> nodeInputs = nodeDetails.getNodeInputs();
             if (nodeInputs != null) {
                 updateWorkflowInputs(nodeDetails.getNodeInputs(), workflowNode);
             }
-            List<DataObjectType> nodeOutputs = nodeDetails.getNodeOutputs();
+            List<OutputDataObjectType> nodeOutputs = nodeDetails.getNodeOutputs();
             if (nodeOutputs != null && !nodeOutputs.isEmpty()){
                 updateNodeOutputs(nodeOutputs, nodeId);
             }
@@ -802,12 +804,12 @@ public class ExperimentRegistry {
     }
 
 
-    public void addWorkflowInputs(List<DataObjectType> wfInputs, WorkflowNodeDetailResource nodeDetailResource) throws RegistryException {
+    public void addWorkflowInputs(List<InputDataObjectType> wfInputs, WorkflowNodeDetailResource nodeDetailResource) throws RegistryException {
         try {
-            for (DataObjectType input : wfInputs) {
+            for (InputDataObjectType input : wfInputs) {
                 NodeInputResource resource = (NodeInputResource) nodeDetailResource.create(ResourceType.NODE_INPUT);
                 resource.setNodeDetailResource(nodeDetailResource);
-                resource.setInputKey(input.getKey());
+                resource.setInputKey(input.getName());
                 resource.setValue(input.getValue());
                 if (input.getType() != null){
                     resource.setInputType(input.getType().toString());
@@ -822,13 +824,13 @@ public class ExperimentRegistry {
 
     }
 
-    public void updateWorkflowInputs(List<DataObjectType> wfInputs, WorkflowNodeDetailResource nodeDetailResource) throws RegistryException {
+    public void updateWorkflowInputs(List<InputDataObjectType> wfInputs, WorkflowNodeDetailResource nodeDetailResource) throws RegistryException {
         try {
             List<NodeInputResource> nodeInputs = nodeDetailResource.getNodeInputs();
-            for (DataObjectType input : wfInputs) {
+            for (InputDataObjectType input : wfInputs) {
                 for (NodeInputResource resource : nodeInputs) {
                     resource.setNodeDetailResource(nodeDetailResource);
-                    resource.setInputKey(input.getKey());
+                    resource.setInputKey(input.getName());
                     resource.setValue(input.getValue());
                     if (input.getType() != null){
                         resource.setInputType(input.getType().toString());
@@ -855,11 +857,11 @@ public class ExperimentRegistry {
             taskDetail.setApplicationVersion(taskDetails.getApplicationVersion());
             taskDetail.setCreationTime(AiravataUtils.getTime(taskDetails.getCreationTime()));
             taskDetail.save();
-            List<DataObjectType> applicationInputs = taskDetails.getApplicationInputs();
+            List<InputDataObjectType> applicationInputs = taskDetails.getApplicationInputs();
             if (applicationInputs != null) {
                 addAppInputs(applicationInputs, taskDetail);
             }
-            List<DataObjectType> applicationOutput = taskDetails.getApplicationOutputs();
+            List<OutputDataObjectType> applicationOutput = taskDetails.getApplicationOutputs();
             if (applicationOutput != null) {
                 addAppOutputs(applicationOutput, taskDetail);
             }
@@ -931,7 +933,7 @@ public class ExperimentRegistry {
             taskDetail.setApplicationDeploymentId(taskDetails.getApplicationDeploymentId());
 
             taskDetail.save();
-            List<DataObjectType> applicationInputs = taskDetails.getApplicationInputs();
+            List<InputDataObjectType> applicationInputs = taskDetails.getApplicationInputs();
             if (applicationInputs != null) {
                 updateAppInputs(applicationInputs, taskDetail);
             }
@@ -980,12 +982,12 @@ public class ExperimentRegistry {
         }
     }
 
-    public void addAppInputs(List<DataObjectType> appInputs, TaskDetailResource taskDetailResource) throws RegistryException {
+    public void addAppInputs(List<InputDataObjectType> appInputs, TaskDetailResource taskDetailResource) throws RegistryException {
         try {
-            for (DataObjectType input : appInputs) {
+            for (InputDataObjectType input : appInputs) {
                 ApplicationInputResource resource = (ApplicationInputResource) taskDetailResource.create(ResourceType.APPLICATION_INPUT);
                 resource.setTaskDetailResource(taskDetailResource);
-                resource.setInputKey(input.getKey());
+                resource.setInputKey(input.getName());
                 resource.setValue(input.getValue());
                 if (input.getType() != null){
                     resource.setInputType(input.getType().toString());
@@ -1000,17 +1002,17 @@ public class ExperimentRegistry {
 
     }
 
-    public void addAppOutputs(List<DataObjectType> appOytputs, TaskDetailResource taskDetailResource) throws RegistryException {
+    public void addAppOutputs(List<OutputDataObjectType> appOytputs, TaskDetailResource taskDetailResource) throws RegistryException {
         try {
-            for (DataObjectType output : appOytputs) {
+            for (OutputDataObjectType output : appOytputs) {
                 ApplicationOutputResource resource = (ApplicationOutputResource) taskDetailResource.create(ResourceType.APPLICATION_OUTPUT);
                 resource.setTaskDetailResource(taskDetailResource);
-                resource.setOutputKey(output.getKey());
+                resource.setOutputKey(output.getName());
                 resource.setValue(output.getValue());
                 if (output.getType() != null){
                     resource.setOutputType(output.getType().toString());
                 }
-                resource.setMetadata(output.getMetaData());
+//                resource.setMetadata(output.getMetaData());
                 resource.save();
             }
         } catch (Exception e) {
@@ -1020,21 +1022,21 @@ public class ExperimentRegistry {
 
     }
 
-    public void updateAppOutputs(List<DataObjectType> appOutputs, String taskId) throws RegistryException {
+    public void updateAppOutputs(List<OutputDataObjectType> appOutputs, String taskId) throws RegistryException {
         try {
             ExperimentResource experiment = (ExperimentResource) gatewayResource.create(ResourceType.EXPERIMENT);
             WorkflowNodeDetailResource workflowNode = (WorkflowNodeDetailResource) experiment.create(ResourceType.WORKFLOW_NODE_DETAIL);
             TaskDetailResource taskDetail = workflowNode.getTaskDetail(taskId);
             List<ApplicationOutputResource> outputs = taskDetail.getApplicationOutputs();
-            for (DataObjectType output : appOutputs) {
+            for (OutputDataObjectType output : appOutputs) {
                 for (ApplicationOutputResource resource : outputs) {
                     resource.setTaskDetailResource(taskDetail);
-                    resource.setOutputKey(output.getKey());
+                    resource.setOutputKey(output.getName());
                     resource.setValue(output.getValue());
                     if (output.getType() != null){
                         resource.setOutputType(output.getType().toString());
                     }
-                    resource.setMetadata(output.getMetaData());
+//                    resource.setMetadata(output.getMetaData());
                     resource.save();
                 }
             }
@@ -1044,13 +1046,13 @@ public class ExperimentRegistry {
         }
     }
 
-    public void updateAppInputs(List<DataObjectType> appInputs, TaskDetailResource taskDetailResource) throws RegistryException {
+    public void updateAppInputs(List<InputDataObjectType> appInputs, TaskDetailResource taskDetailResource) throws RegistryException {
         try {
             List<ApplicationInputResource> inputs = taskDetailResource.getApplicationInputs();
-            for (DataObjectType input : appInputs) {
+            for (InputDataObjectType input : appInputs) {
                 for (ApplicationInputResource resource : inputs) {
                     resource.setTaskDetailResource(taskDetailResource);
-                    resource.setInputKey(input.getKey());
+                    resource.setInputKey(input.getName());
                     resource.setValue(input.getValue());
                     if (input.getType() != null){
                         resource.setInputType(input.getType().toString());
@@ -1517,7 +1519,7 @@ public class ExperimentRegistry {
             existingExperiment.setWorkflowTemplateVersion(experiment.getWorkflowTemplateVersion());
             existingExperiment.setWorkflowExecutionId(experiment.getWorkflowExecutionInstanceId());
             existingExperiment.save();
-            List<DataObjectType> experimentInputs = experiment.getExperimentInputs();
+            List<InputDataObjectType> experimentInputs = experiment.getExperimentInputs();
             if (experimentInputs != null && !experimentInputs.isEmpty()){
                 updateExpInputs(experimentInputs, existingExperiment);
             }
@@ -1527,7 +1529,7 @@ public class ExperimentRegistry {
                 updateUserConfigData(userConfigurationData, expId);
             }
 
-            List<DataObjectType> experimentOutputs = experiment.getExperimentOutputs();
+            List<OutputDataObjectType> experimentOutputs = experiment.getExperimentOutputs();
             if (experimentOutputs != null && !experimentOutputs.isEmpty()){
                 updateExpOutputs(experimentOutputs, expId);
             }
@@ -1946,7 +1948,7 @@ public class ExperimentRegistry {
         return null;
     }
 
-    public List<DataObjectType> getExperimentOutputs(String expId) throws RegistryException {
+    public List<OutputDataObjectType> getExperimentOutputs(String expId) throws RegistryException {
         try {
             ExperimentResource resource = gatewayResource.getExperiment(expId);
             List<ExperimentOutputResource> experimentOutputs = resource.getExperimentOutputs();
@@ -2095,7 +2097,7 @@ public class ExperimentRegistry {
         }
     }
 
-    public List<DataObjectType> getNodeOutputs(String nodeId) throws RegistryException {
+    public List<OutputDataObjectType> getNodeOutputs(String nodeId) throws RegistryException {
         try {
             ExperimentResource resource = (ExperimentResource) gatewayResource.create(ResourceType.EXPERIMENT);
             WorkflowNodeDetailResource workflowNode = resource.getWorkflowNode(nodeId);
@@ -2128,7 +2130,7 @@ public class ExperimentRegistry {
         }
     }
     
-    public List<DataObjectType> getApplicationOutputs(String taskId) throws RegistryException {
+    public List<OutputDataObjectType> getApplicationOutputs(String taskId) throws RegistryException {
         try {
             ExperimentResource resource = (ExperimentResource) gatewayResource.create(ResourceType.EXPERIMENT);
             WorkflowNodeDetailResource workflowNode = (WorkflowNodeDetailResource) resource.create(ResourceType.WORKFLOW_NODE_DETAIL);

http://git-wip-us.apache.org/repos/asf/airavata/blob/f9fdf53e/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/RegistryImpl.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/RegistryImpl.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/RegistryImpl.java
index 71de056..a76bb5d 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/RegistryImpl.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/impl/RegistryImpl.java
@@ -23,6 +23,7 @@ package org.apache.airavata.persistance.registry.jpa.impl;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.persistance.registry.jpa.ResourceUtils;
@@ -130,7 +131,7 @@ public class RegistryImpl implements Registry {
                 case EXPERIMENT_CONFIGURATION_DATA:
                     return experimentRegistry.addUserConfigData((UserConfigurationData) newObjectToAdd, (String) dependentIdentifier);
                 case EXPERIMENT_OUTPUT:
-                    return experimentRegistry.addExpOutputs((List<DataObjectType>) newObjectToAdd, (String) dependentIdentifier);
+                    return experimentRegistry.addExpOutputs((List<OutputDataObjectType>) newObjectToAdd, (String) dependentIdentifier);
                 case EXPERIMENT_STATUS:
                     return experimentRegistry.updateExperimentStatus((ExperimentStatus) newObjectToAdd, (String) dependentIdentifier);
                 case WORKFLOW_NODE_DETAIL:
@@ -138,11 +139,11 @@ public class RegistryImpl implements Registry {
                 case WORKFLOW_NODE_STATUS:
                     return experimentRegistry.addWorkflowNodeStatus((WorkflowNodeStatus) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
                 case NODE_OUTPUT:
-                    return experimentRegistry.addNodeOutputs((List<DataObjectType>) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
+                    return experimentRegistry.addNodeOutputs((List<OutputDataObjectType>) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
                 case TASK_DETAIL:
                     return experimentRegistry.addTaskDetails((TaskDetails) newObjectToAdd, (String) dependentIdentifier);
                 case APPLICATION_OUTPUT:
-                    return experimentRegistry.addApplicationOutputs((List<DataObjectType>) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
+                    return experimentRegistry.addApplicationOutputs((List<OutputDataObjectType>) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
                 case TASK_STATUS:
                     return experimentRegistry.addTaskStatus((TaskStatus) newObjectToAdd, (CompositeIdentifier) dependentIdentifier);
                 case JOB_DETAIL:
@@ -201,7 +202,7 @@ public class RegistryImpl implements Registry {
                     experimentRegistry.updateUserConfigData((UserConfigurationData) newObjectToUpdate, (String) identifier);
                     break;
                 case EXPERIMENT_OUTPUT:
-                    experimentRegistry.updateExpOutputs((List<DataObjectType>) newObjectToUpdate, (String) identifier);
+                    experimentRegistry.updateExpOutputs((List<OutputDataObjectType>) newObjectToUpdate, (String) identifier);
                     break;
                 case EXPERIMENT_STATUS:
                     experimentRegistry.updateExperimentStatus((ExperimentStatus) newObjectToUpdate, (String) identifier);
@@ -213,13 +214,13 @@ public class RegistryImpl implements Registry {
                     experimentRegistry.updateWorkflowNodeStatus((WorkflowNodeStatus) newObjectToUpdate, (String) identifier);
                     break;
                 case NODE_OUTPUT:
-                    experimentRegistry.updateNodeOutputs((List<DataObjectType>) newObjectToUpdate, (String) identifier);
+                    experimentRegistry.updateNodeOutputs((List<OutputDataObjectType>) newObjectToUpdate, (String) identifier);
                     break;
                 case TASK_DETAIL:
                     experimentRegistry.updateTaskDetails((TaskDetails) newObjectToUpdate, (String) identifier);
                     break;
                 case APPLICATION_OUTPUT:
-                    experimentRegistry.updateAppOutputs((List<DataObjectType>) newObjectToUpdate, (String) identifier);
+                    experimentRegistry.updateAppOutputs((List<OutputDataObjectType>) newObjectToUpdate, (String) identifier);
                     break;
                 case TASK_STATUS:
                     experimentRegistry.updateTaskStatus((TaskStatus) newObjectToUpdate, (String) identifier);

http://git-wip-us.apache.org/repos/asf/airavata/blob/f9fdf53e/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
----------------------------------------------------------------------
diff --git a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
index 775518b..306bd3f 100644
--- a/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
+++ b/modules/registry/airavata-jpa-registry/src/main/java/org/apache/airavata/persistance/registry/jpa/utils/ThriftDataModelConversion.java
@@ -24,6 +24,9 @@ package org.apache.airavata.persistance.registry.jpa.utils;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.airavata.model.appcatalog.appinterface.DataType;
+import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.Project;
 import org.apache.airavata.model.workspace.experiment.ActionableGroup;
 import org.apache.airavata.model.workspace.experiment.AdvancedInputDataHandling;
@@ -31,9 +34,7 @@ import org.apache.airavata.model.workspace.experiment.AdvancedOutputDataHandling
 import org.apache.airavata.model.workspace.experiment.ApplicationStatus;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
-import org.apache.airavata.model.workspace.experiment.DataObjectType;
 import org.apache.airavata.model.workspace.experiment.DataTransferDetails;
-import org.apache.airavata.model.workspace.experiment.DataType;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.ErrorDetails;
 import org.apache.airavata.model.workspace.experiment.ExecutionUnit;
@@ -178,62 +179,72 @@ public class ThriftDataModelConversion {
         return null;
     }
 
-    public static DataObjectType getInputOutput(Object object){
+    public static InputDataObjectType getInput(Object object){
         if (object != null){
-            DataObjectType dataObjectType = new DataObjectType();
+            InputDataObjectType dataObjectType = new InputDataObjectType();
             if (object instanceof  ExperimentInputResource){
                 ExperimentInputResource expInput = (ExperimentInputResource) object;
-                dataObjectType.setKey(expInput.getExperimentKey());
+                dataObjectType.setName(expInput.getExperimentKey());
                 dataObjectType.setValue(expInput.getValue());
                 if (expInput.getInputType() != null){
                     dataObjectType.setType(DataType.valueOf(expInput.getInputType()));
                 }
                 dataObjectType.setMetaData(expInput.getMetadata());
                 return dataObjectType;
-            }else if (object instanceof ExperimentOutputResource){
-                ExperimentOutputResource expOutput = (ExperimentOutputResource)object;
-                dataObjectType.setKey(expOutput.getExperimentKey());
-                dataObjectType.setValue(expOutput.getValue());
-                if (expOutput.getOutputType() != null){
-                    dataObjectType.setType(DataType.valueOf(expOutput.getOutputType()));
-                }
-                dataObjectType.setMetaData(expOutput.getMetadata());
-                return dataObjectType;
             }else if (object instanceof NodeInputResource){
                 NodeInputResource nodeInputResource = (NodeInputResource)object;
-                dataObjectType.setKey(nodeInputResource.getInputKey());
+                dataObjectType.setName(nodeInputResource.getInputKey());
                 dataObjectType.setValue(nodeInputResource.getValue());
                 if (nodeInputResource.getInputType() != null){
                     dataObjectType.setType(DataType.valueOf(nodeInputResource.getInputType()));
                 }
                 dataObjectType.setMetaData(nodeInputResource.getMetadata());
                 return dataObjectType;
-            }else if (object instanceof NodeOutputResource){
-                NodeOutputResource nodeOutputResource = (NodeOutputResource)object;
-                dataObjectType.setKey(nodeOutputResource.getOutputKey());
-                dataObjectType.setValue(nodeOutputResource.getValue());
-                if (nodeOutputResource.getOutputType() != null){
-                    dataObjectType.setType(DataType.valueOf(nodeOutputResource.getOutputType()));
-                }
-                dataObjectType.setMetaData(nodeOutputResource.getMetadata());
-                return dataObjectType;
             }else if (object instanceof ApplicationInputResource){
                 ApplicationInputResource inputResource = (ApplicationInputResource)object;
-                dataObjectType.setKey(inputResource.getInputKey());
+                dataObjectType.setName(inputResource.getInputKey());
                 dataObjectType.setValue(inputResource.getValue());
                 if (inputResource.getInputType() != null){
                     dataObjectType.setType(DataType.valueOf(inputResource.getInputType()));
                 }
                 dataObjectType.setMetaData(inputResource.getMetadata());
                 return dataObjectType;
+            }else {
+                return null;
+            }
+        }
+        return null;
+    }
+
+    public static OutputDataObjectType getOutput(Object object){
+        if (object != null){
+            OutputDataObjectType dataObjectType = new OutputDataObjectType();
+            if (object instanceof ExperimentOutputResource){
+                ExperimentOutputResource expOutput = (ExperimentOutputResource)object;
+                dataObjectType.setName(expOutput.getExperimentKey());
+                dataObjectType.setValue(expOutput.getValue());
+                if (expOutput.getOutputType() != null){
+                    dataObjectType.setType(DataType.valueOf(expOutput.getOutputType()));
+                }
+//                dataObjectType.setMetaData(expOutput.getMetadata());
+                return dataObjectType;
+            }else if (object instanceof NodeOutputResource){
+                NodeOutputResource nodeOutputResource = (NodeOutputResource)object;
+                dataObjectType.setName(nodeOutputResource.getOutputKey());
+                dataObjectType.setValue(nodeOutputResource.getValue());
+                if (nodeOutputResource.getOutputType() != null){
+                    dataObjectType.setType(DataType.valueOf(nodeOutputResource.getOutputType()));
+                }
+//                dataObjectType.setMetaData(nodeOutputResource.getMetadata());
+                return dataObjectType;
             }else if (object instanceof ApplicationOutputResource){
                 ApplicationOutputResource outputResource = (ApplicationOutputResource)object;
-                dataObjectType.setKey(outputResource.getOutputKey());
+                dataObjectType.setName(outputResource.getOutputKey());
                 dataObjectType.setValue(outputResource.getValue());
                 if (outputResource.getOutputType() != null){
                     dataObjectType.setType(DataType.valueOf(outputResource.getOutputType()));
                 }
-                dataObjectType.setMetaData(outputResource.getMetadata());
+//                dataObjectType.setMetaData(outputResource.getMetadata());
                 return dataObjectType;
             }else {
                 return null;
@@ -242,66 +253,66 @@ public class ThriftDataModelConversion {
         return null;
     }
 
-    public static List<DataObjectType> getExpInputs (List<ExperimentInputResource> exInputList){
-        List<DataObjectType> expInputs = new ArrayList<DataObjectType>();
+    public static List<InputDataObjectType> getExpInputs (List<ExperimentInputResource> exInputList){
+        List<InputDataObjectType> expInputs = new ArrayList<InputDataObjectType>();
         if (exInputList != null && !exInputList.isEmpty()){
             for (ExperimentInputResource inputResource : exInputList){
-                DataObjectType exInput = getInputOutput(inputResource);
+                InputDataObjectType exInput = getInput(inputResource);
                 expInputs.add(exInput);
             }
         }
         return expInputs;
     }
 
-    public static List<DataObjectType> getExpOutputs (List<ExperimentOutputResource> experimentOutputResourceList){
-        List<DataObjectType> exOutputs = new ArrayList<DataObjectType>();
+    public static List<OutputDataObjectType> getExpOutputs (List<ExperimentOutputResource> experimentOutputResourceList){
+        List<OutputDataObjectType> exOutputs = new ArrayList<OutputDataObjectType>();
         if (experimentOutputResourceList != null && !experimentOutputResourceList.isEmpty()){
             for (ExperimentOutputResource outputResource : experimentOutputResourceList){
-                DataObjectType output = getInputOutput(outputResource);
+                OutputDataObjectType output = getOutput(outputResource);
                 exOutputs.add(output);
             }
         }
         return exOutputs;
     }
 
-    public static List<DataObjectType> getNodeInputs (List<NodeInputResource> nodeInputResources){
-        List<DataObjectType> nodeInputs = new ArrayList<DataObjectType>();
+    public static List<InputDataObjectType> getNodeInputs (List<NodeInputResource> nodeInputResources){
+        List<InputDataObjectType> nodeInputs = new ArrayList<InputDataObjectType>();
         if (nodeInputResources != null && !nodeInputResources.isEmpty()){
             for (NodeInputResource inputResource : nodeInputResources){
-                DataObjectType nodeInput = getInputOutput(inputResource);
+                InputDataObjectType nodeInput = getInput(inputResource);
                 nodeInputs.add(nodeInput);
             }
         }
         return nodeInputs;
     }
 
-    public static List<DataObjectType> getNodeOutputs (List<NodeOutputResource> nodeOutputResourceList){
-        List<DataObjectType> nodeOutputs = new ArrayList<DataObjectType>();
+    public static List<OutputDataObjectType> getNodeOutputs (List<NodeOutputResource> nodeOutputResourceList){
+        List<OutputDataObjectType> nodeOutputs = new ArrayList<OutputDataObjectType>();
         if (nodeOutputResourceList != null && !nodeOutputResourceList.isEmpty()){
             for (NodeOutputResource outputResource : nodeOutputResourceList){
-                DataObjectType output = getInputOutput(outputResource);
+                OutputDataObjectType output = getOutput(outputResource);
                 nodeOutputs.add(output);
             }
         }
         return nodeOutputs;
     }
 
-    public static List<DataObjectType> getApplicationInputs (List<ApplicationInputResource> applicationInputResources){
-        List<DataObjectType> appInputs = new ArrayList<DataObjectType>();
+    public static List<InputDataObjectType> getApplicationInputs (List<ApplicationInputResource> applicationInputResources){
+        List<InputDataObjectType> appInputs = new ArrayList<InputDataObjectType>();
         if (applicationInputResources != null && !applicationInputResources.isEmpty()){
             for (ApplicationInputResource inputResource : applicationInputResources){
-                DataObjectType appInput = getInputOutput(inputResource);
+                InputDataObjectType appInput = getInput(inputResource);
                 appInputs.add(appInput);
             }
         }
         return appInputs;
     }
 
-    public static List<DataObjectType> getApplicationOutputs (List<ApplicationOutputResource> outputResources){
-        List<DataObjectType> appOutputs = new ArrayList<DataObjectType>();
+    public static List<OutputDataObjectType> getApplicationOutputs (List<ApplicationOutputResource> outputResources){
+        List<OutputDataObjectType> appOutputs = new ArrayList<OutputDataObjectType>();
         if (outputResources != null && !outputResources.isEmpty()){
             for (ApplicationOutputResource outputResource : outputResources){
-                DataObjectType output = getInputOutput(outputResource);
+                OutputDataObjectType output = getOutput(outputResource);
                 appOutputs.add(output);
             }
         }

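The former getInputOutput(Object) helper is now split into the type-specific getInput and getOutput converters above. A brief usage sketch; the variable names are illustrative:

    // Converting persisted registry resources back into the app-catalog thrift types
    InputDataObjectType appInput = ThriftDataModelConversion.getInput(applicationInputResource);
    OutputDataObjectType appOutput = ThriftDataModelConversion.getOutput(applicationOutputResource);
    // Both converters return null for unrecognised resource types (and for null arguments),
    // so callers should guard against a null result.
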

[28/50] [abbrv] airavata git commit: Fixed NPE of scheduler and changed gfac-config.xml

Posted by ch...@apache.org.
Fixed NPE of scheduler and changed gfac-config.xml


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/d8176e81
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/d8176e81
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/d8176e81

Branch: refs/heads/master
Commit: d8176e813b6cf7bb0fe5ec118f502781c4615ff9
Parents: 3eea1a3
Author: shamrath <sh...@gmail.com>
Authored: Thu Nov 6 11:31:39 2014 -0500
Committer: shamrath <sh...@gmail.com>
Committed: Thu Nov 6 11:31:39 2014 -0500

----------------------------------------------------------------------
 .../tools/RegisterSampleApplications.java       |  8 +--
 .../server/src/main/resources/gfac-config.xml   |  6 +--
 .../org/apache/airavata/gfac/Constants.java     |  3 ++
 .../org/apache/airavata/gfac/Scheduler.java     | 56 ++++++++++++++++----
 .../airavata/gfac/core/utils/GFacUtils.java     | 26 ++++++++-
 5 files changed, 82 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/d8176e81/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java
index bd4023b..b5fc2b2 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/tools/RegisterSampleApplications.java
@@ -1092,12 +1092,12 @@ public class RegisterSampleApplications {
 
 
             ComputeResourcePreference stampedeResourcePreferences = RegisterSampleApplicationsUtils.
-                    createComputeResourcePreference(stampedeResourceId, "TG-STA110014S", false, null, null, null,
-                            "/scratch/01437/ogce/gta-work-dirs");
+                    createComputeResourcePreference(stampedeResourceId, "TG-STA110014S", false, null,
+                            JobSubmissionProtocol.SSH, DataMovementProtocol.SCP, "/scratch/01437/ogce/gta-work-dirs");
 
             ComputeResourcePreference trestlesResourcePreferences = RegisterSampleApplicationsUtils.
-                    createComputeResourcePreference(trestlesResourceId, "sds128", false, null, null, null,
-                            "/oasis/scratch/trestles/ogce/temp_project/gta-work-dirs");
+                    createComputeResourcePreference(trestlesResourceId, "sds128", false, null, JobSubmissionProtocol.SSH,
+                            DataMovementProtocol.SCP, "/oasis/scratch/trestles/ogce/temp_project/gta-work-dirs");
 
             ComputeResourcePreference bigRedResourcePreferences = RegisterSampleApplicationsUtils.
                     createComputeResourcePreference(bigredResourceId, "TG-STA110014S", false, null, null, null,

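For context, the Stampede and Trestles preferences above now pin the job submission and data movement protocols explicitly instead of passing null, which appears to be what the scheduler NPE fix in this commit relies on. A hedged sketch of the same call for an arbitrary resource; the resource id, allocation, and paths are placeholders, and the parameter meanings in the comments are inferred from the calls above:

    ComputeResourcePreference pref = RegisterSampleApplicationsUtils.createComputeResourcePreference(
            clusterResourceId,             // compute resource id (placeholder)
            "MY-ALLOCATION-001",           // allocation / project number (placeholder)
            false,                         // do not override the preference
            null,                          // no preferred batch queue
            JobSubmissionProtocol.SSH,     // explicit submission protocol instead of null
            DataMovementProtocol.SCP,      // explicit data movement protocol instead of null
            "/path/to/scratch/work-dirs"); // scratch / working directory (placeholder)
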
http://git-wip-us.apache.org/repos/asf/airavata/blob/d8176e81/modules/configuration/server/src/main/resources/gfac-config.xml
----------------------------------------------------------------------
diff --git a/modules/configuration/server/src/main/resources/gfac-config.xml b/modules/configuration/server/src/main/resources/gfac-config.xml
index 85421c4..502192f 100644
--- a/modules/configuration/server/src/main/resources/gfac-config.xml
+++ b/modules/configuration/server/src/main/resources/gfac-config.xml
@@ -22,7 +22,7 @@
         </InHandlers>
         <OutHandlers></OutHandlers>
     </GlobalHandlers>
-    <Provider class="org.apache.airavata.gfac.local.provider.impl.LocalProvider" host="org.apache.airavata.schemas.gfac.impl.HostDescriptionTypeImpl">
+    <Provider class="org.apache.airavata.gfac.local.provider.impl.LocalProvider" submission="LOCAL">
         <InHandlers>
             <Handler class="org.apache.airavata.gfac.local.handler.LocalDirectorySetupHandler"/>
         </InHandlers>
@@ -65,7 +65,7 @@
         </OutHandlers>
     </Application>
 
-     <Provider class="org.apache.airavata.gfac.ssh.provider.impl.SSHProvider" host="org.apache.airavata.schemas.gfac.impl.SSHHostTypeImpl" executionMode="async">
+     <Provider class="org.apache.airavata.gfac.ssh.provider.impl.SSHProvider" submission="SSH" executionMode="async">
          <InHandlers>
             <Handler class="org.apache.airavata.gfac.ssh.handler.SSHDirectorySetupHandler"/>
             <Handler class="org.apache.airavata.gfac.ssh.handler.SSHInputHandler"/>
@@ -74,7 +74,7 @@
             <Handler class="org.apache.airavata.gfac.ssh.handler.SSHOutputHandler"/>
         </OutHandlers>
     </Provider>
-    <Provider class="org.apache.airavata.gfac.gsissh.provider.impl.GSISSHProvider" host="org.apache.airavata.schemas.gfac.impl.GsisshHostTypeImpl" executionMode="async">
+    <Provider class="org.apache.airavata.gfac.gsissh.provider.impl.GSISSHProvider" submission="SSH" security="GSI" executionMode="async">
              <InHandlers>
                 <Handler class="org.apache.airavata.gfac.gsissh.handler.GSISSHDirectorySetupHandler"/>
                  <!--Handler class="org.apache.airavata.gfac.ssh.handler.AdvancedSCPInputHandler">

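Taken together, providers in gfac-config.xml are now selected by the job submission protocol of the chosen interface (plus an optional security protocol) rather than by host description classes; a condensed sketch of the resulting mapping, with the handler lists elided:

    <!-- sketch only: InHandlers/OutHandlers elided -->
    <Provider class="org.apache.airavata.gfac.local.provider.impl.LocalProvider"   submission="LOCAL"/>
    <Provider class="org.apache.airavata.gfac.ssh.provider.impl.SSHProvider"       submission="SSH" executionMode="async"/>
    <Provider class="org.apache.airavata.gfac.gsissh.provider.impl.GSISSHProvider" submission="SSH" security="GSI" executionMode="async"/>
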
http://git-wip-us.apache.org/repos/asf/airavata/blob/d8176e81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java
index 69d4a4a..c1550df 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Constants.java
@@ -34,10 +34,13 @@ public class Constants {
 
 	public static final String XPATH_EXPR_PROVIDER_HANDLERS_START = "/GFac/Provider[@class='";
     public static final String XPATH_EXPR_PROVIDER_ON_HOST = "/GFac/Provider[@host='";
+    public static final String XPATH_EXPR_PROVIDER_ON_SUBMISSION = "/GFac/Provider[@submission='";
 	public static final String XPATH_EXPR_PROVIDER_INFLOW_HANDLERS_END = "']/InHandlers/Handler";
 	public static final String XPATH_EXPR_PROVIDER_OUTFLOW_HANDLERS_END = "']/OutHandlers/Handler";
 
 	public static final String GFAC_CONFIG_CLASS_ATTRIBUTE = "class";
+	public static final String GFAC_CONFIG_SECURITY_ATTRIBUTE = "security";
+	public static final String GFAC_CONFIG_SUBMISSION_ATTRIBUTE = "submission";
     public static final String GFAC_CONFIG_EXECUTION_MODE_ATTRIBUTE = "executionMode";
 	public static final String GFAC_CONFIG_APPLICATION_NAME_ATTRIBUTE = "class";
 	public static final String NEWLINE = System.getProperty("line.separator");

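The new constants are concatenated into the XPath query the Scheduler runs against gfac-config.xml; for example, with an SSH submission protocol:

    // jobSubmissionProtocol == JobSubmissionProtocol.SSH
    String expression = Constants.XPATH_EXPR_PROVIDER_ON_SUBMISSION + jobSubmissionProtocol + "']";
    // expression -> /GFac/Provider[@submission='SSH']
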
http://git-wip-us.apache.org/repos/asf/airavata/blob/d8176e81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 2bd612c..0dae029 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -21,15 +21,24 @@
 
 package org.apache.airavata.gfac;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderConfig;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
+import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
+import org.w3c.dom.Element;
 import org.xml.sax.SAXException;
 
 import javax.xml.parsers.DocumentBuilder;
@@ -109,15 +118,44 @@ public class Scheduler {
             if (provider == null) {
 
                 List<JobSubmissionInterface> jobSubmissionInterfaces = jobExecutionContext.getApplicationContext().getComputeResourceDescription().getJobSubmissionInterfaces();
-                String hostClass = jobExecutionContext.getPreferredJobSubmissionProtocol().toString();
-                providerClassName = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
-                Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);
-                provider = aClass1.newInstance();
-                //loading the provider properties
-                aClass = GFacConfiguration.getProviderConfig(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_HANDLERS_START +
-                        providerClassName + "']", Constants.GFAC_CONFIG_APPLICATION_NAME_ATTRIBUTE);
-                if(!aClass.isEmpty()){
-                    provider.initProperties(aClass.get(0).getProperties());
+                JobSubmissionProtocol jobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
+                SSHJobSubmission sshJobSubmission;
+                LOCALSubmission localSubmission;
+                String securityProtocol = null;
+                try {
+                    AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+                    if (jobSubmissionProtocol == JobSubmissionProtocol.SSH) {
+                        sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                                jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+                        if (sshJobSubmission != null) {
+                            securityProtocol  = sshJobSubmission.getSecurityProtocol().toString();
+                        }
+                    }else if (jobSubmissionProtocol == JobSubmissionProtocol.LOCAL) {
+                        localSubmission = appCatalog.getComputeResource().getLocalJobSubmission(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+                    }
+                    List<Element> elements = GFacUtils.getElementList(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_SUBMISSION + jobSubmissionProtocol + "']");
+                    for (Element element : elements) {
+                        String security = element.getAttribute(Constants.GFAC_CONFIG_SECURITY_ATTRIBUTE);
+                        if (securityProtocol == null && security == null) {
+                            providerClassName = element.getAttribute(Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
+                        }else if (securityProtocol.equals(security)) {
+                            providerClassName = element.getAttribute(Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
+                        }
+                    }
+                    if (providerClassName == null) {
+                        throw new GFacException("Couldn't find provider class");
+                    }
+
+                    Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);
+                    provider = aClass1.newInstance();
+                    //loading the provider properties
+                    aClass = GFacConfiguration.getProviderConfig(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_HANDLERS_START +
+                            providerClassName + "']", Constants.GFAC_CONFIG_APPLICATION_NAME_ATTRIBUTE);
+                    if (!aClass.isEmpty()) {
+                        provider.initProperties(aClass.get(0).getProperties());
+                    }
+                } catch (AppCatalogException e) {
+                    throw new GFacException("Couldn't retrieve job submission protocol from app catalog ");
                 }
             }
         } catch (XPathExpressionException e) {

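Pulled out of the surrounding plumbing, the selection rule the new block applies can be sketched as a small standalone helper (hypothetical name; DOM getAttribute() returns an empty string for an absent attribute, so the sketch treats null and empty alike):

    import java.util.List;
    import org.w3c.dom.Element;

    // Hypothetical helper: given the already-filtered /GFac/Provider[@submission='...']
    // elements, pick the provider class, honouring the optional security attribute.
    private static String selectProviderClass(List<Element> providers, String securityProtocol) {
        for (Element provider : providers) {
            String security = provider.getAttribute(Constants.GFAC_CONFIG_SECURITY_ATTRIBUTE);
            boolean noSecurityRequested = securityProtocol == null || securityProtocol.isEmpty();
            boolean noSecurityDeclared  = security == null || security.isEmpty();
            if ((noSecurityRequested && noSecurityDeclared)
                    || (!noSecurityRequested && securityProtocol.equals(security))) {
                return provider.getAttribute(Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
            }
        }
        return null; // caller throws GFacException("Couldn't find provider class")
    }
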
http://git-wip-us.apache.org/repos/asf/airavata/blob/d8176e81/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index 6fb2115..c5a96f9 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -52,7 +52,16 @@ import org.apache.zookeeper.*;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
 import java.io.*;
 import java.net.InetAddress;
 import java.net.URISyntaxException;
@@ -151,6 +160,21 @@ public class GFacUtils {
 		return name + "_" + date;
 	}
 
+    public static List<Element> getElementList(Document doc, String expression) throws XPathExpressionException {
+        XPathFactory xPathFactory = XPathFactory.newInstance();
+        XPath xPath = xPathFactory.newXPath();
+        XPathExpression expr = xPath.compile(expression);
+        NodeList nodeList = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
+        List<Element> elementList = new ArrayList<Element>();
+        for (int i = 0; i < nodeList.getLength(); i++) {
+            Node item = nodeList.item(i);
+            if (item instanceof Element) {
+                elementList.add((Element) item);
+            }
+        }
+        return elementList;
+    }
+
 	public static String createGsiftpURIAsString(String host, String localPath)
 			throws URISyntaxException {
 		StringBuffer buf = new StringBuffer();

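A short usage sketch for the new utility against a parsed gfac-config.xml (standard JAXP parsing; checked exceptions omitted for brevity):

    import java.util.List;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    Document doc = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder()
            .parse("gfac-config.xml");
    List<Element> sshProviders = GFacUtils.getElementList(doc, "/GFac/Provider[@submission='SSH']");
    for (Element provider : sshProviders) {
        System.out.println(provider.getAttribute("class")
                + " security=" + provider.getAttribute("security"));
    }
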

[15/50] [abbrv] airavata git commit: Updated JobExecutionContext with thrift data model types

Posted by ch...@apache.org.
Updated JobExecutionContext with thrift data model types


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/73e21be4
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/73e21be4
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/73e21be4

Branch: refs/heads/master
Commit: 73e21be4cf423dc35fb5f1a3363f989a40e067e7
Parents: 96a673f
Author: shamrath <sh...@gmail.com>
Authored: Thu Oct 30 15:57:33 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:15 2014 -0500

----------------------------------------------------------------------
 .../gfac/core/context/JobExecutionContext.java  | 91 +++++++++++++++-----
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 37 ++++----
 2 files changed, 85 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/73e21be4/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 2b2255f..3616b42 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -33,6 +33,9 @@ import org.apache.airavata.gfac.SecurityContext;
 import org.apache.airavata.gfac.core.cpi.GFac;
 import org.apache.airavata.gfac.core.notification.GFacNotifier;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.workspace.experiment.Experiment;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
@@ -67,16 +70,42 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     private ZooKeeper zk;
 
     private String credentialStoreToken;
-
+    /**
+     * User defined working directory.
+     */
     private String workingDir;
-
+    /**
+     * Input data directory
+     */
     private String inputDir;
+    /**
+     * Output data directory
+     */
     private String outputDir;
-    private String standaredOutput;
-    private String standaredError;
-    private String prefferedJobSubmissionProtocal;
-    private String prefferedDataMovementProtocal;
-
+    /**
+     * standard output file path
+     */
+    private String standardOutput;
+    /**
+     * standard error file path
+     */
+    private String standardError;
+    /**
+     * User preferred job submission protocol.
+     */
+    private JobSubmissionProtocol preferredJobSubmissionProtocol;
+    /**
+     * User preferred data movement protocol.
+     */
+    private DataMovementProtocol preferredDataMovementProtocol;
+    /**
+     * List of job submission protocols sorted by priority order.
+      */
+    private List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces;
+    /**
+     * use preferred job submission protocol.
+     */
+    private JobSubmissionInterface preferredJobSubmissionInterface;
 
 //    private ContextHeaderDocument.ContextHeader contextHeader;
 
@@ -354,35 +383,51 @@ public class JobExecutionContext extends AbstractContext implements Serializable
         this.outputDir = outputDir;
     }
 
-    public String getStandaredOutput() {
-        return standaredOutput;
+    public String getStandardOutput() {
+        return standardOutput;
+    }
+
+    public void setStandardOutput(String standardOutput) {
+        this.standardOutput = standardOutput;
+    }
+
+    public String getStandardError() {
+        return standardError;
+    }
+
+    public void setStandardError(String standardError) {
+        this.standardError = standardError;
+    }
+
+    public JobSubmissionProtocol getPreferredJobSubmissionProtocol() {
+        return preferredJobSubmissionProtocol;
     }
 
-    public void setStandaredOutput(String standaredOutput) {
-        this.standaredOutput = standaredOutput;
+    public void setPreferredJobSubmissionProtocol(JobSubmissionProtocol preferredJobSubmissionProtocol) {
+        this.preferredJobSubmissionProtocol = preferredJobSubmissionProtocol;
     }
 
-    public String getStandaredError() {
-        return standaredError;
+    public DataMovementProtocol getPreferredDataMovementProtocol() {
+        return preferredDataMovementProtocol;
     }
 
-    public void setStandaredError(String standaredError) {
-        this.standaredError = standaredError;
+    public void setPreferredDataMovementProtocol(DataMovementProtocol preferredDataMovementProtocol) {
+        this.preferredDataMovementProtocol = preferredDataMovementProtocol;
     }
 
-    public String getPrefferedJobSubmissionProtocal() {
-        return prefferedJobSubmissionProtocal;
+    public List<JobSubmissionInterface> getHostPrioritizedJobSubmissionInterfaces() {
+        return hostPrioritizedJobSubmissionInterfaces;
     }
 
-    public void setPrefferedJobSubmissionProtocal(String prefferedJobSubmissionProtocal) {
-        this.prefferedJobSubmissionProtocal = prefferedJobSubmissionProtocal;
+    public void setHostPrioritizedJobSubmissionInterfaces(List<JobSubmissionInterface> hostPrioritizedJobSubmissionInterfaces) {
+        this.hostPrioritizedJobSubmissionInterfaces = hostPrioritizedJobSubmissionInterfaces;
     }
 
-    public String getPrefferedDataMovementProtocal() {
-        return prefferedDataMovementProtocal;
+    public JobSubmissionInterface getPreferredJobSubmissionInterface() {
+        return preferredJobSubmissionInterface;
     }
 
-    public void setPrefferedDataMovementProtocal(String prefferedDataMovementProtocal) {
-        this.prefferedDataMovementProtocal = prefferedDataMovementProtocal;
+    public void setPreferredJobSubmissionInterface(JobSubmissionInterface preferredJobSubmissionInterface) {
+        this.preferredJobSubmissionInterface = preferredJobSubmissionInterface;
     }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/73e21be4/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index fd43c65..696b61b 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -53,6 +53,7 @@ import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentD
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.workspace.experiment.*;
@@ -70,6 +71,8 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Properties;
 
@@ -319,31 +322,25 @@ public class BetterGfacImpl implements GFac,Watcher {
             /*
             * Stdout and Stderr for Shell
             */
-            jobExecutionContext.setStandaredOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
-            jobExecutionContext.setStandaredError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+            jobExecutionContext.setStandardOutput(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
+            jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+
+            jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
         }
 
         List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
-        String preferredJobSubmissionProtocol = gatewayResourcePreferences.getPreferredJobSubmissionProtocol();
-        String hostClass;
-        if (preferredJobSubmissionProtocol != null){
-            hostClass = preferredJobSubmissionProtocol;
-        }else {
-            if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
-                int lowestPriority = jobSubmissionInterfaces.get(0).getPriorityOrder();
-                String selectedHost = null;
-                for (int i = 0; i < jobSubmissionInterfaces.size() - 1; i++){
-                    if (jobSubmissionInterfaces.get(i+1).getPriorityOrder() < lowestPriority ){
-                        lowestPriority = jobSubmissionInterfaces.get(i+1).getPriorityOrder();
-                        selectedHost = jobSubmissionInterfaces.get(i+1).getJobSubmissionProtocol().toString();
-                    }
+        if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
+            Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
+                @Override
+                public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
+                    return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
                 }
-                hostClass = selectedHost;
-            }else {
-                throw new GFacException("Compute resource should have atleast one job submission interface defined...");
-            }
+            });
+
+            jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
+        }else {
+            throw new GFacException("Compute resource should have at least one job submission interface defined...");
         }
-        jobExecutionContext.setPrefferedJobSubmissionProtocal(hostClass);
 
         return jobExecutionContext;
     }

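On Java 8 and later the same priority ordering can be written with a method-reference comparator; an equivalent sketch, not what the commit itself uses:

    import java.util.Comparator;

    jobSubmissionInterfaces.sort(Comparator.comparingInt(JobSubmissionInterface::getPriorityOrder));
    jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
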

[02/50] [abbrv] airavata git commit: Removed legacy descriptions from the MonitorID, GSISSH provider, GSISSH utils, and AMQPMonitor classes

Posted by ch...@apache.org.
Removed legacy descriptions from the MonitorID, GSISSH provider, GSISSH utils, and AMQPMonitor classes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/f29dfbe7
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/f29dfbe7
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/f29dfbe7

Branch: refs/heads/master
Commit: f29dfbe709c322703225e7f14f1bb1ffd1129a02
Parents: 14bd941
Author: shamrath <sh...@gmail.com>
Authored: Fri Oct 31 12:25:31 2014 -0400
Committer: shamrath <sh...@gmail.com>
Committed: Fri Oct 31 12:25:31 2014 -0400

----------------------------------------------------------------------
 .../data/impl/GwyResourceProfileImpl.java       |   8 +-
 .../data/util/AppCatalogThriftConversion.java   |   4 +-
 .../app/catalog/test/GatewayProfileTest.java    |   8 +-
 .../gfac/core/context/JobExecutionContext.java  |   4 +
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  |  33 +++---
 .../airavata/gfac/core/monitor/MonitorID.java   |  19 ++--
 .../gsissh/provider/impl/GSISSHProvider.java    |  64 ++++++-----
 .../gfac/gsissh/util/GFACGSISSHUtils.java       | 108 ++++++++++---------
 .../monitor/impl/push/amqp/AMQPMonitor.java     |  57 +++++-----
 .../apache/airavata/job/AMQPMonitorTest.java    |  64 +++++++----
 10 files changed, 213 insertions(+), 156 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
index ed66bff..101b647 100644
--- a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
+++ b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/impl/GwyResourceProfileImpl.java
@@ -66,8 +66,8 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(profileResource.getGatewayID());
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol());
+                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());
@@ -100,8 +100,8 @@ public class GwyResourceProfileImpl implements GwyResourceProfile {
                     resource.setComputeHostResource((ComputeResourceResource)computeHostResource.get(preference.getComputeResourceId()));
                     resource.setGatewayId(gatewayId);
                     resource.setOverrideByAiravata(preference.isOverridebyAiravata());
-                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol());
-                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol());
+                    resource.setPreferredJobProtocol(preference.getPreferredJobSubmissionProtocol().toString());
+                    resource.setPreferedDMProtocol(preference.getPreferredDataMovementProtocol().toString());
                     resource.setBatchQueue(preference.getPreferredBatchQueue());
                     resource.setProjectNumber(preference.getAllocationProjectNumber());
                     resource.setScratchLocation(preference.getScratchLocation());

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
index 05cfa11..bc435f4 100644
--- a/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
+++ b/modules/app-catalog/app-catalog-data/src/main/java/org/apache/aiaravata/application/catalog/data/util/AppCatalogThriftConversion.java
@@ -670,8 +670,8 @@ public class AppCatalogThriftConversion {
         ComputeResourcePreference preference = new ComputeResourcePreference();
         preference.setComputeResourceId(resource.getResourceId());
         preference.setOverridebyAiravata(resource.getOverrideByAiravata());
-        preference.setPreferredJobSubmissionProtocol(resource.getPreferredJobProtocol());
-        preference.setPreferredDataMovementProtocol(resource.getPreferedDMProtocol());
+        preference.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.valueOf(resource.getPreferredJobProtocol()));
+        preference.setPreferredDataMovementProtocol(DataMovementProtocol.valueOf(resource.getPreferedDMProtocol()));
         preference.setPreferredBatchQueue(resource.getBatchQueue());
         preference.setScratchLocation(resource.getScratchLocation());
         preference.setAllocationProjectNumber(resource.getProjectNumber());

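The conversion above relies on the usual enum/string round trip between the thrift protocol enums and the strings the resource layer stores; for example:

    String stored = JobSubmissionProtocol.SSH.toString();                    // "SSH"
    JobSubmissionProtocol restored = JobSubmissionProtocol.valueOf(stored);  // JobSubmissionProtocol.SSH
    // valueOf throws IllegalArgumentException if the stored string does not name a constant
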
http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
----------------------------------------------------------------------
diff --git a/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java b/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
index 66eb6bb..3593e11 100644
--- a/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
+++ b/modules/app-catalog/app-catalog-data/src/test/java/org/apache/airavata/app/catalog/test/GatewayProfileTest.java
@@ -84,8 +84,8 @@ public class GatewayProfileTest {
         ComputeResourcePreference preference1 = new ComputeResourcePreference();
         preference1.setComputeResourceId(hostId1);
         preference1.setOverridebyAiravata(true);
-        preference1.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH.toString());
-        preference1.setPreferredDataMovementProtocol(DataMovementProtocol.SCP.toString());
+        preference1.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
+        preference1.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);
         preference1.setPreferredBatchQueue("queue1");
         preference1.setScratchLocation("/tmp");
         preference1.setAllocationProjectNumber("project1");
@@ -93,8 +93,8 @@ public class GatewayProfileTest {
         ComputeResourcePreference preference2 = new ComputeResourcePreference();
         preference2.setComputeResourceId(hostId2);
         preference2.setOverridebyAiravata(true);
-        preference2.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.LOCAL.toString());
-        preference2.setPreferredDataMovementProtocol(DataMovementProtocol.GridFTP.toString());
+        preference2.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.LOCAL);
+        preference2.setPreferredDataMovementProtocol(DataMovementProtocol.GridFTP);
         preference2.setPreferredBatchQueue("queue2");
         preference2.setScratchLocation("/tmp");
         preference2.setAllocationProjectNumber("project2");

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index a95540c..891bece 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -430,4 +430,8 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setPreferredJobSubmissionInterface(JobSubmissionInterface preferredJobSubmissionInterface) {
         this.preferredJobSubmissionInterface = preferredJobSubmissionInterface;
     }
+
+    public String getHostName() {
+        return applicationContext.getComputeResourceDescription().getHostName();
+    }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index 696b61b..e8e4c66 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -302,6 +302,20 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setGfac(this);
         jobExecutionContext.setZk(zk);
         jobExecutionContext.setCredentialStoreToken(AiravataZKUtils.getExpTokenId(zk, experimentID, taskID));
+
+        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
+        if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
+            Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
+                @Override
+                public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
+                    return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
+                }
+            });
+
+            jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
+        }else {
+            throw new GFacException("Compute resource should have at least one job submission interface defined...");
+        }
         if (gatewayResourcePreferences != null ) {
             if (gatewayResourcePreferences.getScratchLocation() == null) {
                 gatewayResourcePreferences.setScratchLocation("/tmp");
@@ -326,22 +340,11 @@ public class BetterGfacImpl implements GFac,Watcher {
             jobExecutionContext.setStandardError(workingDir + File.separator + applicationInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
 
             jobExecutionContext.setPreferredJobSubmissionProtocol(gatewayResourcePreferences.getPreferredJobSubmissionProtocol());
+            if (gatewayResourcePreferences.getPreferredJobSubmissionProtocol() == null) {
+                jobExecutionContext.setPreferredJobSubmissionInterface(jobExecutionContext.getHostPrioritizedJobSubmissionInterfaces().get(0));
+                jobExecutionContext.setPreferredJobSubmissionProtocol(jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionProtocol());
+            }
         }
-
-        List<JobSubmissionInterface> jobSubmissionInterfaces = computeResource.getJobSubmissionInterfaces();
-        if (jobSubmissionInterfaces != null && !jobSubmissionInterfaces.isEmpty()){
-            Collections.sort(jobSubmissionInterfaces, new Comparator<JobSubmissionInterface>() {
-                @Override
-                public int compare(JobSubmissionInterface jobSubmissionInterface, JobSubmissionInterface jobSubmissionInterface2) {
-                    return jobSubmissionInterface.getPriorityOrder() - jobSubmissionInterface2.getPriorityOrder();
-                }
-            });
-
-            jobExecutionContext.setHostPrioritizedJobSubmissionInterfaces(jobSubmissionInterfaces);
-        }else {
-            throw new GFacException("Compute resource should have at least one job submission interface defined...");
-        }
-
         return jobExecutionContext;
     }
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
index 6ea1839..55da288 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/monitor/MonitorID.java
@@ -22,7 +22,6 @@ package org.apache.airavata.gfac.core.monitor;
 
 import org.apache.airavata.common.logger.AiravataLogger;
 import org.apache.airavata.common.logger.AiravataLoggerFactory;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -44,7 +43,7 @@ public class MonitorID {
 
     private Timestamp lastMonitored;
 
-    private HostDescription host;
+    private ComputeResourceDescription computeResourceDescription;
 
     private Map<String, Object> parameters;
 
@@ -67,7 +66,7 @@ public class MonitorID {
     public MonitorID() {
     }
     public MonitorID(MonitorID monitorID){
-        this.host = monitorID.getHost();
+        this.computeResourceDescription = monitorID.getComputeResourceDescription();
         this.jobStartedTime = new Timestamp((new Date()).getTime());
         this.userName = monitorID.getUserName();
         this.jobID = monitorID.getJobID();
@@ -76,8 +75,8 @@ public class MonitorID {
         this.workflowNodeID = monitorID.getWorkflowNodeID();
         this.jobName = monitorID.getJobName();
     }
-    public MonitorID(HostDescription host, String jobID, String taskID, String workflowNodeID, String experimentID, String userName,String jobName) {
-        this.host = host;
+    public MonitorID(ComputeResourceDescription computeResourceDescription, String jobID, String taskID, String workflowNodeID, String experimentID, String userName,String jobName) {
+        this.computeResourceDescription = computeResourceDescription;
         this.jobStartedTime = new Timestamp((new Date()).getTime());
         this.userName = userName;
         this.jobID = jobID;
@@ -89,7 +88,7 @@ public class MonitorID {
 
     public MonitorID(JobExecutionContext jobExecutionContext) {
         this.jobExecutionContext = jobExecutionContext;
-        host = jobExecutionContext.getApplicationContext().getHostDescription();
+        this.computeResourceDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
         userName = jobExecutionContext.getExperiment().getUserName();
         taskID = jobExecutionContext.getTaskData().getTaskID();
         experimentID = jobExecutionContext.getExperiment().getExperimentID();
@@ -102,12 +101,12 @@ public class MonitorID {
         }
     }
 
-    public HostDescription getHost() {
-        return host;
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
     }
 
-    public void setHost(HostDescription host) {
-        this.host = host;
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
     }
 
     public Timestamp getLastMonitored() {

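A usage sketch of the updated MonitorID constructor, which now takes the thrift ComputeResourceDescription in place of the legacy HostDescription (the job identifier below is hypothetical):

    ComputeResourceDescription computeResource = jobExecutionContext.getApplicationContext()
            .getComputeResourceDescription();
    MonitorID monitorID = new MonitorID(computeResource,
            "12345.some-pbs-server",      // jobID (hypothetical)
            taskID, workflowNodeID, experimentID, userName, jobName);
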
http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
index b5a325a..92a50e4 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
@@ -20,6 +20,9 @@
 */
 package org.apache.airavata.gfac.gsissh.provider.impl;
 
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.airavata.appcatalog.cpi.AppCatalogException;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.ExecutionMode;
 import org.apache.airavata.gfac.GFacException;
@@ -36,11 +39,16 @@ import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gsi.ssh.api.Cluster;
 import org.apache.airavata.gsi.ssh.api.SSHApiException;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.MonitorMode;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
+//import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.airavata.schemas.gfac.HostDescriptionType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
 import org.apache.zookeeper.KeeperException;
@@ -48,6 +56,7 @@ import org.apache.zookeeper.ZooKeeper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.management.monitor.Monitor;
 import java.util.List;
 import java.util.Map;
 
@@ -76,14 +85,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
         log.info("Invoking GSISSH Provider Invoke ...");
         StringBuffer data = new StringBuffer();
         jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
-        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) jobExecutionContext.getApplicationContext().
-                getApplicationDeploymentDescription().getType();
+        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                .getComputeResourceDescription();
+        ApplicationDeploymentDescription appDeployDesc = jobExecutionContext.getApplicationContext()
+                .getApplicationDeploymentDescription();
         JobDetails jobDetails = new JobDetails();
         Cluster cluster = null;
-        
+
         try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                    jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+
             if (jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT) != null) {
                 cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
             }
@@ -93,7 +106,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 log.info("Successfully retrieved the Security Context");
             }
             // This installed path is a mandetory field, because this could change based on the computing resource
-            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, app, cluster);
+            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, cluster);
             jobDetails.setJobName(jobDescriptor.getJobName());
 
             log.info(jobDescriptor.toXML());
@@ -113,10 +126,10 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
 
             // Now job has submitted to the resource, its up to the Provider to parse the information to daemon handler
             // to perform monitoring, daemon handlers can be accessed from anywhere
-            delegateToMonitorHandlers(jobExecutionContext, (GsisshHostType) host, jobDetails.getJobID());
+            delegateToMonitorHandlers(jobExecutionContext, sshJobSubmission , jobDetails.getJobID());
             // we know this host is type GsiSSHHostType
         } catch (Exception e) {
-		    String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+		    String error = "Error submitting the job to host " + computeResourceDescription.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -130,18 +143,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
           
     }
 
-    public void delegateToMonitorHandlers(JobExecutionContext jobExecutionContext, GsisshHostType host, String jobID) throws GFacHandlerException {
+    public void delegateToMonitorHandlers(JobExecutionContext jobExecutionContext, SSHJobSubmission sshJobSubmission, String jobID) throws GFacHandlerException {
         List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         if (daemonHandlers == null) {
             daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         }
         ThreadedHandler pullMonitorHandler = null;
         ThreadedHandler pushMonitorHandler = null;
-        String monitorMode = host.getMonitorMode();
+        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
         for (ThreadedHandler threadedHandler : daemonHandlers) {
             if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pullMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PULL.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
                     log.info("Job is launched successfully now parsing it to monitoring in pull mode, JobID Returned:  " + jobID);
                     pullMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -150,7 +163,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 }
             } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pushMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PUSH.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
                     log.info("Job is launched successfully now parsing it to monitoring in push mode, JobID Returned:  " + jobID);
                     pushMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -166,18 +179,18 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
         }
     }
 
-    public void removeFromMonitorHandlers(JobExecutionContext jobExecutionContext, GsisshHostType host, String jobID) throws GFacHandlerException {
+    public void removeFromMonitorHandlers(JobExecutionContext jobExecutionContext, SSHJobSubmission sshJobSubmission, String jobID) throws GFacHandlerException {
         List<ThreadedHandler> daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         if (daemonHandlers == null) {
             daemonHandlers = BetterGfacImpl.getDaemonHandlers();
         }
         ThreadedHandler pullMonitorHandler = null;
         ThreadedHandler pushMonitorHandler = null;
-        String monitorMode = host.getMonitorMode();
+        MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
         for (ThreadedHandler threadedHandler : daemonHandlers) {
             if ("org.apache.airavata.gfac.monitor.handlers.GridPullMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pullMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PULL.equals(monitorMode)) {
+                if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
                     jobExecutionContext.setProperty("cancel","true");
                     pullMonitorHandler.invoke(jobExecutionContext);
                 } else {
@@ -186,7 +199,7 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                 }
             } else if ("org.apache.airavata.gfac.monitor.handlers.GridPushMonitorHandler".equals(threadedHandler.getClass().getName())) {
                 pushMonitorHandler = threadedHandler;
-                if ("".equals(monitorMode) || monitorMode == null || org.apache.airavata.common.utils.Constants.PUSH.equals(monitorMode)) {
+                if ( monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
                     pushMonitorHandler.invoke(jobExecutionContext);
                 } else {
                     log.error("Currently we only support Pull and Push monitoring and monitorMode should be PUSH" +
@@ -208,8 +221,6 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
     public void cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
         //To change body of implemented methods use File | Settings | File Templates.
         log.info("canceling the job status in GSISSHProvider!!!!!");
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
         JobDetails jobDetails = jobExecutionContext.getJobDetails();
         try {
             Cluster cluster = null;
@@ -236,14 +247,14 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
             // we know this host is type GsiSSHHostType
         } catch (SSHApiException e) {
-            String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
             GFacUtils.saveErrorDetails(jobExecutionContext, error, CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
             throw new GFacProviderException(error, e);
         } catch (Exception e) {
-            String error = "Error submitting the job to host " + host.getHostAddress() + " message: " + e.getMessage();
+            String error = "Error submitting the job to host " + jobExecutionContext.getHostName() + " message: " + e.getMessage();
             log.error(error);
             jobDetails.setJobID("none");
             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.FAILED);
@@ -255,8 +266,8 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
     public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException {
         // have to implement the logic to recover a gfac failure
         log.info("Invoking Recovering for the Experiment: " + jobExecutionContext.getExperimentID());
-        HostDescriptionType host = jobExecutionContext.getApplicationContext().
-                getHostDescription().getType();
+        ComputeResourceDescription computeResourceDescription = jobExecutionContext.getApplicationContext()
+                .getComputeResourceDescription();
         String jobId = "";
         String jobDesc = "";
         try {
@@ -306,8 +317,11 @@ public class GSISSHProvider extends AbstractRecoverableProvider {
                     throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
                 }
             }
-            delegateToMonitorHandlers(jobExecutionContext, (GsisshHostType) host, jobId);
-        } catch (GFacHandlerException e) {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(
+                    jobExecutionContext.getPreferredJobSubmissionInterface().getJobSubmissionInterfaceId());
+            delegateToMonitorHandlers(jobExecutionContext, sshJobSubmission, jobId);
+        } catch (Exception e) {
             throw new GFacProviderException("Error delegating already ran job to Monitoring", e);
         }
     }

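The monitor delegation now keys off the thrift MonitorMode carried by the SSHJobSubmission rather than a free-form string; condensed, the per-handler conditions applied above are:

    MonitorMode monitorMode = sshJobSubmission.getMonitorMode();
    // GridPullMonitorHandler: null or POLL_JOB_MANAGER -> pull (poll the resource job manager)
    if (monitorMode == null || monitorMode == MonitorMode.POLL_JOB_MANAGER) {
        pullMonitorHandler.invoke(jobExecutionContext);
    }
    // GridPushMonitorHandler: null or XSEDE_AMQP_SUBSCRIBE -> push (AMQP notifications)
    if (monitorMode == null || monitorMode == MonitorMode.XSEDE_AMQP_SUBSCRIBE) {
        pushMonitorHandler.invoke(jobExecutionContext);
    }
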
http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index 4d338e3..baca65c 100644
--- a/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-gsissh/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -20,21 +20,19 @@
 */
 package org.apache.airavata.gfac.gsissh.util;
 
-import java.sql.SQLException;
-import java.util.*;
-
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.StringUtil;
 import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.commons.gfac.type.MappingFactory;
-import org.apache.airavata.credential.store.credential.Credential;
 import org.apache.airavata.credential.store.credential.impl.certificate.CertificateCredential;
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.RequestData;
+import org.apache.airavata.gfac.core.context.ApplicationContext;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
@@ -47,22 +45,26 @@ import org.apache.airavata.gsi.ssh.api.job.JobManagerConfiguration;
 import org.apache.airavata.gsi.ssh.impl.GSISSHAbstractCluster;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.util.CommonUtils;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.workspace.experiment.ComputationalResourceScheduling;
 import org.apache.airavata.model.workspace.experiment.TaskDetails;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.FileArrayType;
-import org.apache.airavata.schemas.gfac.GlobusHostType;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
 import org.apache.airavata.schemas.gfac.StringArrayType;
 import org.apache.airavata.schemas.gfac.URIArrayType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
-import org.apache.openjpa.lib.log.Log;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.validation.constraints.Max;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
 
 
 public class GFACGSISSHUtils {
@@ -74,32 +76,35 @@ public class GFACGSISSHUtils {
     public static int maxClusterCount = 5;
     public static Map<String, List<Cluster>> clusters = new HashMap<String, List<Cluster>>();
     public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
-        HostDescription registeredHost = jobExecutionContext.getApplicationContext().getHostDescription();
-        if (registeredHost.getType() instanceof GlobusHostType || registeredHost.getType() instanceof UnicoreHostType
-                || registeredHost.getType() instanceof SSHHostType) {
-            logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
-        } else if (registeredHost.getType() instanceof GsisshHostType) {
-            String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
-            RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
-            requestData.setTokenId(credentialStoreToken);
-            PBSCluster pbsCluster = null;
-            GSISecurityContext context = null;
-            try {
+        JobSubmissionInterface jobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+        JobSubmissionProtocol jobProtocol = jobSubmissionInterface.getJobSubmissionProtocol();
+        try {
+            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+            SSHJobSubmission sshJobSubmission = appCatalog.getComputeResource().getSSHJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
+            if (jobProtocol == JobSubmissionProtocol.GLOBUS || jobProtocol == JobSubmissionProtocol.UNICORE
+                    || jobProtocol == JobSubmissionProtocol.CLOUD || jobProtocol == JobSubmissionProtocol.LOCAL) {
+                logger.error("This is a wrong method to invoke to non ssh host types,please check your gfac-config.xml");
+            } else if (jobProtocol == JobSubmissionProtocol.SSH && sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
+                String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
+                RequestData requestData = new RequestData(ServerSettings.getDefaultUserGateway());
+                requestData.setTokenId(credentialStoreToken);
+                PBSCluster pbsCluster = null;
+                GSISecurityContext context = null;
+
                 TokenizedMyProxyAuthInfo tokenizedMyProxyAuthInfo = new TokenizedMyProxyAuthInfo(requestData);
                 CredentialReader credentialReader = GFacUtils.getCredentialReader();
-                if(credentialReader != null){
-                	CertificateCredential credential = null;
-					try {
-						credential = (CertificateCredential)credentialReader.getCredential(ServerSettings.getDefaultUserGateway(), credentialStoreToken);
-			      		requestData.setMyProxyUserName(credential.getCommunityUser().getUserName());
-					} catch (Exception e) {
-						logger.error(e.getLocalizedMessage());
-					}
+                if (credentialReader != null) {
+                    CertificateCredential credential = null;
+                    try {
+                        credential = (CertificateCredential) credentialReader.getCredential(ServerSettings.getDefaultUserGateway(), credentialStoreToken);
+                        requestData.setMyProxyUserName(credential.getCommunityUser().getUserName());
+                    } catch (Exception e) {
+                        logger.error(e.getLocalizedMessage());
+                    }
                 }
 
-                GsisshHostType gsisshHostType = (GsisshHostType) registeredHost.getType();
-                String key = requestData.getMyProxyUserName() + registeredHost.getType().getHostAddress() +
-                        gsisshHostType.getPort();
+                String key = requestData.getMyProxyUserName() + jobExecutionContext.getHostName()+
+                        sshJobSubmission.getSshPort();
                 boolean recreate = false;
                 synchronized (clusters) {
                     if (clusters.containsKey(key) && clusters.get(key).size() < maxClusterCount) {
@@ -112,7 +117,7 @@ public class GFACGSISSHUtils {
                             clusters.get(key).remove(i);
                             recreate = true;
                         }
-                        if(!recreate) {
+                        if (!recreate) {
                             try {
                                 pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
                             } catch (Exception e) {
@@ -129,13 +134,12 @@ public class GFACGSISSHUtils {
                     }
 
                     if (recreate) {
-                        ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), registeredHost.getType().getHostAddress(),
-                                gsisshHostType.getPort());
+                        ServerInfo serverInfo = new ServerInfo(requestData.getMyProxyUserName(), jobExecutionContext.getHostName(),
+                                sshJobSubmission.getSshPort());
 
                         JobManagerConfiguration jConfig = null;
-                        String installedParentPath = ((HpcApplicationDeploymentType)
-                                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType()).getInstalledParentPath();
-                        String jobManager = ((GsisshHostType) registeredHost.getType()).getJobManager();
+                        String installedParentPath = sshJobSubmission.getResourceJobManager().getJobManagerBinPath();
+                        String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
                         if (jobManager == null) {
                             logger.error("No Job Manager is configured, so we are picking pbs as the default job manager");
                             jConfig = CommonUtils.getPBSJobManager(installedParentPath);
@@ -160,28 +164,30 @@ public class GFACGSISSHUtils {
                         clusters.put(key, pbsClusters);
                     }
                 }
-            } catch (Exception e) {
-                throw new GFacException("An error occurred while creating GSI security context", e);
+
+                jobExecutionContext.addSecurityContext(Constants.GSI_SECURITY_CONTEXT, context);
             }
-            jobExecutionContext.addSecurityContext(Constants.GSI_SECURITY_CONTEXT, context);
+        } catch (Exception e) {
+            throw new GFacException("An error occurred while creating GSI security context", e);
         }
     }
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext,
-                                                    ApplicationDeploymentDescriptionType app, Cluster cluster) {
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
         JobDescriptor jobDescriptor = new JobDescriptor();
+        ApplicationContext applicationContext = jobExecutionContext.getApplicationContext();
+        ApplicationDeploymentDescription app = applicationContext.getApplicationDeploymentDescription();
         // this is common for any application descriptor
         jobDescriptor.setCallBackIp(ServerSettings.getIp());
         jobDescriptor.setCallBackPort(ServerSettings.getSetting(org.apache.airavata.common.utils.Constants.GFAC_SERVER_PORT, "8950"));
-        jobDescriptor.setInputDirectory(app.getInputDataDirectory());
-        jobDescriptor.setOutputDirectory(app.getOutputDataDirectory());
-        jobDescriptor.setExecutablePath(app.getExecutableLocation());
-        jobDescriptor.setStandardOutFile(app.getStandardOutput());
-        jobDescriptor.setStandardErrorFile(app.getStandardError());
+        jobDescriptor.setInputDirectory(jobExecutionContext.getInputDir());
+        jobDescriptor.setOutputDirectory(jobExecutionContext.getOutputDir());
+        jobDescriptor.setExecutablePath(app.getExecutablePath());
+        jobDescriptor.setStandardOutFile(jobExecutionContext.getStandardOutput());
+        jobDescriptor.setStandardErrorFile(jobExecutionContext.getStandardError());
         Random random = new Random();
         int i = random.nextInt(Integer.MAX_VALUE); // We always set the job name
         jobDescriptor.setJobName("A" + String.valueOf(i+99999999));
-        jobDescriptor.setWorkingDirectory(app.getStaticWorkingDirectory());
+        jobDescriptor.setWorkingDirectory(jobExecutionContext.getWorkingDir());
 
         List<String> inputValues = new ArrayList<String>();
         MessageContext input = jobExecutionContext.getInMessageContext();
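
Read together, the added lines replace the old GsisshHostType checks with a lookup through the app catalog. A minimal sketch of the new decision path, using only calls that appear in the hunk above (imports are those added at the top of the file; myProxyUserName stands in for the value resolved from the credential store):

    JobSubmissionInterface jsi = jobExecutionContext.getPreferredJobSubmissionInterface();
    SSHJobSubmission sshJobSubmission = AppCatalogFactory.getAppCatalog().getComputeResource()
            .getSSHJobSubmission(jsi.getJobSubmissionInterfaceId());
    if (jsi.getJobSubmissionProtocol() == JobSubmissionProtocol.SSH
            && sshJobSubmission.getSecurityProtocol() == SecurityProtocol.GSI) {
        // host, port and job manager details now come from the context and the catalog entry
        String key = myProxyUserName + jobExecutionContext.getHostName() + sshJobSubmission.getSshPort();
        ServerInfo serverInfo = new ServerInfo(myProxyUserName,
                jobExecutionContext.getHostName(), sshJobSubmission.getSshPort());
        String installedParentPath = sshJobSubmission.getResourceJobManager().getJobManagerBinPath();
        String jobManager = sshJobSubmission.getResourceJobManager().getResourceJobManagerType().toString();
    }

The cluster cache keyed on key and the JobManagerConfiguration selection are unchanged apart from these lookups.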

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
index baab7b4..28d13f2 100644
--- a/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
+++ b/modules/gfac/gfac-monitor/src/main/java/org/apache/airavata/gfac/monitor/impl/push/amqp/AMQPMonitor.java
@@ -30,12 +30,12 @@ import java.util.concurrent.BlockingQueue;
 
 import org.apache.airavata.common.utils.MonitorPublisher;
 import org.apache.airavata.common.utils.ServerSettings;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.core.PushMonitor;
 import org.apache.airavata.gfac.monitor.exception.AiravataMonitorException;
 import org.apache.airavata.gfac.monitor.util.AMQPConnectionUtil;
 import org.apache.airavata.gfac.monitor.util.CommonUtils;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.apache.airavata.model.messaging.event.JobIdentifier;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
 import org.apache.airavata.model.workspace.experiment.JobState;
@@ -107,30 +107,37 @@ public class AMQPMonitor extends PushMonitor {
     @Override
     public boolean registerListener(MonitorID monitorID) throws AiravataMonitorException {
         // we subscribe to read user-host based subscription
-        HostDescription host = monitorID.getHost();
-        String hostAddress = host.getType().getHostAddress();
-        // in amqp case there are no multiple jobs per each host, because once a job is put in to the queue it
-        // will be picked by the Monitor, so jobs will not stay in this queueu but jobs will stay in finishQueue
-        String channelID = CommonUtils.getChannelID(monitorID);
-        if(availableChannels.get(channelID) == null){
-        try {
-            //todo need to fix this rather getting it from a file
-            Connection connection = AMQPConnectionUtil.connect(amqpHosts, connectionName, proxyPath);
-            Channel channel = null;
-            channel = connection.createChannel();
-            availableChannels.put(channelID, channel);
-            String queueName = channel.queueDeclare().getQueue();
-
-            BasicConsumer consumer = new
-                    BasicConsumer(new JSONMessageParser(), localPublisher);          // here we use local publisher
-            channel.basicConsume(queueName, true, consumer);
-            String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
-            // here we queuebind to a particular user in a particular machine
-            channel.queueBind(queueName, "glue2.computing_activity", filterString);
-            logger.info("Using filtering string to monitor: " + filterString);
-        } catch (IOException e) {
-            logger.error("Error creating the connection to finishQueue the job:" + monitorID.getUserName());
-        }
+        ComputeResourceDescription computeResourceDescription = monitorID.getComputeResourceDescription();
+        if (computeResourceDescription.isSetIpAddresses() && computeResourceDescription.getIpAddresses().size() > 0) {
+            // we get first ip address for the moment
+            String hostAddress = computeResourceDescription.getIpAddresses().get(0);
+            // in amqp case there are no multiple jobs per each host, because once a job is put in to the queue it
+            // will be picked by the Monitor, so jobs will not stay in this queueu but jobs will stay in finishQueue
+            String channelID = CommonUtils.getChannelID(monitorID);
+            if (availableChannels.get(channelID) == null) {
+                try {
+                    //todo need to fix this rather getting it from a file
+                    Connection connection = AMQPConnectionUtil.connect(amqpHosts, connectionName, proxyPath);
+                    Channel channel = null;
+                    channel = connection.createChannel();
+                    availableChannels.put(channelID, channel);
+                    String queueName = channel.queueDeclare().getQueue();
+
+                    BasicConsumer consumer = new
+                            BasicConsumer(new JSONMessageParser(), localPublisher);          // here we use local publisher
+                    channel.basicConsume(queueName, true, consumer);
+                    String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
+                    // here we queuebind to a particular user in a particular machine
+                    channel.queueBind(queueName, "glue2.computing_activity", filterString);
+                    logger.info("Using filtering string to monitor: " + filterString);
+                } catch (IOException e) {
+                    logger.error("Error creating the connection to finishQueue the job:" + monitorID.getUserName());
+                }
+            }
+        } else {
+            throw new AiravataMonitorException("Couldn't register monitor for jobId :" + monitorID.getJobID() +
+                    " , ComputeResourceDescription " + computeResourceDescription.getHostName() + " doesn't has an " +
+                    "IpAddress with it");
         }
         return true;
     }
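
Condensed from the registerListener change above: the routing key is now derived from the first IP address of the ComputeResourceDescription, and registration fails fast when no address is set. A sketch, with channel and queueName as declared in the committed method:

    ComputeResourceDescription cr = monitorID.getComputeResourceDescription();
    if (cr.isSetIpAddresses() && cr.getIpAddresses().size() > 0) {
        String hostAddress = cr.getIpAddresses().get(0);  // only the first address is used for now
        String filterString = CommonUtils.getRoutingKey(monitorID.getUserName(), hostAddress);
        channel.queueBind(queueName, "glue2.computing_activity", filterString);
    } else {
        throw new AiravataMonitorException("ComputeResourceDescription " + cr.getHostName()
                + " has no IP address to monitor against");
    }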

http://git-wip-us.apache.org/repos/asf/airavata/blob/f29dfbe7/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
index 94528b9..a979890 100644
--- a/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
+++ b/modules/gfac/gfac-monitor/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
@@ -20,15 +20,11 @@
 */
 package org.apache.airavata.job;
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
+import com.google.common.eventbus.EventBus;
+import com.google.common.eventbus.Subscribe;
+import org.airavata.appcatalog.cpi.AppCatalog;
+import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.utils.MonitorPublisher;
-import org.apache.airavata.commons.gfac.type.HostDescription;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
 import org.apache.airavata.gfac.monitor.impl.push.amqp.AMQPMonitor;
 import org.apache.airavata.gsi.ssh.api.Cluster;
@@ -38,14 +34,29 @@ import org.apache.airavata.gsi.ssh.api.authentication.GSIAuthenticationInfo;
 import org.apache.airavata.gsi.ssh.api.job.JobDescriptor;
 import org.apache.airavata.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementInterface;
+import org.apache.airavata.model.appcatalog.computeresource.DataMovementProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
+import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManagerType;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SecurityProtocol;
 import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
-import org.apache.airavata.schemas.gfac.GsisshHostType;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.eventbus.EventBus;
-import com.google.common.eventbus.Subscribe;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
 
 public class AMQPMonitorTest {
 
@@ -54,12 +65,13 @@ public class AMQPMonitorTest {
     private String certificateLocation;
     private String pbsFilePath;
     private String workingDirectory;
-    private HostDescription hostDescription;
     private MonitorPublisher monitorPublisher;
     private BlockingQueue<MonitorID> finishQueue;
     private BlockingQueue<MonitorID> pushQueue;
     private Thread pushThread;
     private String proxyFilePath;
+    private ComputeResourceDescription computeResourceDescription;
+
     @Before
     public void setUp() throws Exception {
         System.setProperty("myproxy.username", "ogce");
@@ -98,14 +110,26 @@ public class AMQPMonitorTest {
         } catch (Exception e) {
             e.printStackTrace();
         }
+        computeResourceDescription = new ComputeResourceDescription("TestComputerResoruceId", "TestHostName");
+        computeResourceDescription.setHostName("stampede-host");
+        computeResourceDescription.addToIpAddresses("login1.stampede.tacc.utexas.edu");
+        ResourceJobManager resourceJobManager = new ResourceJobManager("1234", ResourceJobManagerType.SLURM);
+        Map<JobManagerCommand, String> commandMap = new HashMap<JobManagerCommand, String>();
+        commandMap.put(JobManagerCommand.SUBMISSION, "test");
+        resourceJobManager.setJobManagerCommands(commandMap);
+        resourceJobManager.setJobManagerBinPath("/usr/bin/");
+        resourceJobManager.setPushMonitoringEndpoint("push"); // TODO - add monitor mode
+        SSHJobSubmission sshJobSubmission = new SSHJobSubmission("TestSSHJobSubmissionInterfaceId", SecurityProtocol.GSI,
+                resourceJobManager);
+
+        AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+        String jobSubmissionID = appCatalog.getComputeResource().addSSHJobSubmission(sshJobSubmission);
+
+        JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface(jobSubmissionID, JobSubmissionProtocol.SSH, 1);
+
+        computeResourceDescription.addToJobSubmissionInterfaces(jobSubmissionInterface);
+        computeResourceDescription.addToDataMovementInterfaces(new DataMovementInterface("4532", DataMovementProtocol.SCP, 1));
 
-        hostDescription = new HostDescription(GsisshHostType.type);
-        hostDescription.getType().setHostAddress("login1.stampede.tacc.utexas.edu");
-        hostDescription.getType().setHostName("stampede-host");
-        ((GsisshHostType) hostDescription.getType()).setJobManager("slurm");
-        ((GsisshHostType) hostDescription.getType()).setInstalledPath("/usr/bin/");
-        ((GsisshHostType) hostDescription.getType()).setPort(2222);
-        ((GsisshHostType) hostDescription.getType()).setMonitorMode("push");
     }
 
     @Test
@@ -151,7 +175,7 @@ public class AMQPMonitorTest {
         String jobID = pbsCluster.submitBatchJob(jobDescriptor);
         System.out.println(jobID);
         try {
-            pushQueue.add(new MonitorID(hostDescription, jobID,null,null,null, "ogce", jobName));
+            pushQueue.add(new MonitorID(computeResourceDescription, jobID,null,null,null, "ogce", jobName));
         } catch (Exception e) {
             e.printStackTrace();
         }
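
The fixture now has to describe the resource through the app catalog models rather than a GsisshHostType document. A condensed view of the setup added above, ending with the MonitorID that the test pushes (identifiers are the test values from the hunk):

    ComputeResourceDescription cr = new ComputeResourceDescription("TestComputerResoruceId", "TestHostName");
    cr.setHostName("stampede-host");
    cr.addToIpAddresses("login1.stampede.tacc.utexas.edu");
    ResourceJobManager rjm = new ResourceJobManager("1234", ResourceJobManagerType.SLURM);
    rjm.setJobManagerBinPath("/usr/bin/");
    SSHJobSubmission ssh = new SSHJobSubmission("TestSSHJobSubmissionInterfaceId", SecurityProtocol.GSI, rjm);
    String jobSubmissionID = AppCatalogFactory.getAppCatalog().getComputeResource().addSSHJobSubmission(ssh);
    cr.addToJobSubmissionInterfaces(new JobSubmissionInterface(jobSubmissionID, JobSubmissionProtocol.SSH, 1));
    pushQueue.add(new MonitorID(cr, jobID, null, null, null, "ogce", jobName));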


[12/50] [abbrv] airavata git commit: committing initial gfac app catalog integration

Posted by ch...@apache.org.
committing initial gfac app catalog integration


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/8abe8dca
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/8abe8dca
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/8abe8dca

Branch: refs/heads/master
Commit: 8abe8dca55f49eeac0ed1416b4565a767922b7a0
Parents: 91f5de5
Author: chathuriw <ka...@gmail.com>
Authored: Tue Oct 28 16:23:38 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:16:14 2014 -0500

----------------------------------------------------------------------
 .../client/samples/CreateLaunchExperiment.java  |  11 +-
 .../org/apache/airavata/gfac/Scheduler.java     |   5 +-
 .../gfac/core/context/ApplicationContext.java   |  44 +--
 .../gfac/core/context/JobExecutionContext.java  |  47 +++
 .../airavata/gfac/core/cpi/BetterGfacImpl.java  | 292 +++----------------
 .../core/handler/AppDescriptorCheckHandler.java |  61 ++--
 .../gfac/core/provider/utils/ProviderUtils.java |  18 +-
 .../airavata/gfac/core/utils/GFacUtils.java     |  16 +
 8 files changed, 160 insertions(+), 334 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
index 2845bc6..a96cba7 100644
--- a/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
+++ b/airavata-api/airavata-client-sdks/java-client-samples/src/main/java/org/apache/airavata/client/samples/CreateLaunchExperiment.java
@@ -53,7 +53,7 @@ public class CreateLaunchExperiment {
     private static final String DEFAULT_GATEWAY = "default.registry.gateway";
     private static Airavata.Client airavataClient;
 
-    private static String echoAppId = "Echo_6281480a-9887-4a0f-8311-59bbaf738e54";
+    private static String echoAppId = "Echo_b6782be4-315b-4cbd-9403-aa7ce564548a";
     private static String wrfAppId = "WRF_5f097c9c-7066-49ec-aed7-4e39607b3adc";
     private static String amberAppId = "Amber_89906be6-5678-49a6-9d04-a0604fbdef2e";
 
@@ -70,7 +70,7 @@ public class CreateLaunchExperiment {
     public static void main(String[] args) throws Exception {
                 airavataClient = AiravataClientFactory.createAiravataClient(THRIFT_SERVER_HOST, THRIFT_SERVER_PORT);
                 System.out.println("API version is " + airavataClient.getAPIVersion());
-                registerApplications(); // run this only the first time
+//                registerApplications(); // run this only the first time
                 createAndLaunchExp();
     }
     
@@ -79,12 +79,13 @@ public class CreateLaunchExperiment {
     
     
     public static void createAndLaunchExp() throws TException {
-    	final String expId = createEchoExperimentForFSD(airavataClient);
+//    	final String expId = createEchoExperimentForFSD(airavataClient);
     	try {
-        for (int i = 0; i < 2; i++) {
+        for (int i = 0; i < 1; i++) {
 //    final String expId = createExperimentForSSHHost(airavata);
 //            final String expId = createEchoExperimentForFSD(airavataClient);
 //    final String expId = createEchoExperimentForStampede(airavataClient);
+    final String expId = createEchoExperimentForTrestles(airavataClient);
 //    final String expId = createExperimentEchoForLocalHost(airavataClient);
 //    final String expId = createExperimentWRFTrestles(airavataClient);
 //    final String expId = createExperimentForBR2(airavataClient);
@@ -93,7 +94,7 @@ public class CreateLaunchExperiment {
 //    final String expId = createExperimentForStampedeAmber(airavataClient);
 //    final String expId = createExperimentForTrestlesAmber(airavataClient);
 
-//    System.out.println("Experiment ID : " + expId);
+    System.out.println("Experiment ID : " + expId);
 //    updateExperiment(airavata, expId);
             launchExperiment(airavataClient, expId);
         }

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
index 1b8efe0..9b70fae 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/Scheduler.java
@@ -39,6 +39,7 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderConfig;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
@@ -75,7 +76,7 @@ public class Scheduler {
      * @return GFacProvider instance.
      */
     private static GFacProvider getProvider(JobExecutionContext jobExecutionContext) throws GFacException {
-        HostDescription hostDescription = jobExecutionContext.getApplicationContext().getHostDescription();
+        ComputeResourceDescription hostDescription = jobExecutionContext.getApplicationContext().getComputeResourceDescription();
         String applicationName = jobExecutionContext.getServiceName();
 
         URL resource = Scheduler.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
@@ -111,6 +112,8 @@ public class Scheduler {
             }
             // We give higher preference to applications specific provider if configured
             if (provider == null) {
+
+                jobExecutionContext.getApplicationContext().getComputeResourcePreference().getPreferredJobSubmissionProtocol()
                 String hostClass = hostDescription.getType().getClass().getName();
                 providerClassName = GFacConfiguration.getAttributeValue(GFacConfiguration.getHandlerDoc(), Constants.XPATH_EXPR_PROVIDER_ON_HOST + hostClass + "']", Constants.GFAC_CONFIG_CLASS_ATTRIBUTE);
                 Class<? extends GFacProvider> aClass1 = Class.forName(providerClassName).asSubclass(GFacProvider.class);

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/ApplicationContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/ApplicationContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/ApplicationContext.java
index 4083f29..29197be 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/ApplicationContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/ApplicationContext.java
@@ -21,37 +21,47 @@
 
 package org.apache.airavata.gfac.core.context;
 
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
+import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
+import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
 
 public class ApplicationContext extends AbstractContext {
+    private ApplicationDeploymentDescription applicationDeploymentDescription;
+    private ComputeResourceDescription computeResourceDescription;
+    private ApplicationInterfaceDescription applicationInterfaceDescription;
+    private ComputeResourcePreference computeResourcePreference;
 
-    private ApplicationDescription applicationDeploymentDescription;
-    private ServiceDescription serviceDescription;
-    private HostDescription hostDescription;
-
-    public ApplicationDescription getApplicationDeploymentDescription() {
+    public ApplicationDeploymentDescription getApplicationDeploymentDescription() {
         return applicationDeploymentDescription;
     }
 
-    public <T extends ApplicationDescription> void setApplicationDeploymentDescription(T applicationDeploymentDescription) {
+    public void setApplicationDeploymentDescription(ApplicationDeploymentDescription applicationDeploymentDescription) {
         this.applicationDeploymentDescription = applicationDeploymentDescription;
     }
 
-    public <T extends ServiceDescription> void setServiceDescription(T serviceDescription) {
-        this.serviceDescription = serviceDescription;
+    public ComputeResourceDescription getComputeResourceDescription() {
+        return computeResourceDescription;
+    }
+
+    public void setComputeResourceDescription(ComputeResourceDescription computeResourceDescription) {
+        this.computeResourceDescription = computeResourceDescription;
+    }
+
+    public ApplicationInterfaceDescription getApplicationInterfaceDescription() {
+        return applicationInterfaceDescription;
     }
 
-    public <T extends HostDescription> void setHostDescription(T hostDescription) {
-        this.hostDescription = hostDescription;
+    public void setApplicationInterfaceDescription(ApplicationInterfaceDescription applicationInterfaceDescription) {
+        this.applicationInterfaceDescription = applicationInterfaceDescription;
     }
 
-    public ServiceDescription getServiceDescription() {
-        return serviceDescription;
+    public ComputeResourcePreference getComputeResourcePreference() {
+        return computeResourcePreference;
     }
 
-    public HostDescription getHostDescription() {
-        return hostDescription;
+    public void setComputeResourcePreference(ComputeResourcePreference computeResourcePreference) {
+        this.computeResourcePreference = computeResourcePreference;
     }
 }
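
For orientation, the four new fields are filled from the app catalog when the job execution context is assembled (see the BetterGfacImpl hunk further down in this commit); a minimal sketch of that wiring, using the local variable names from that hunk:

    ApplicationContext applicationContext = new ApplicationContext();
    applicationContext.setComputeResourceDescription(computeResource);
    applicationContext.setApplicationDeploymentDescription(applicationDeployment);
    applicationContext.setApplicationInterfaceDescription(applicationInterface);
    applicationContext.setComputeResourcePreference(gatewayResourcePreferences);
    jobExecutionContext.setApplicationContext(applicationContext);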

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
index 2f94ec5..da716c5 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/context/JobExecutionContext.java
@@ -66,6 +66,13 @@ public class JobExecutionContext extends AbstractContext implements Serializable
 
     private String credentialStoreToken;
 
+    private String workingDir;
+
+    private String inputDir;
+    private String outputDir;
+    private String standaredOutput;
+    private String standaredError;
+
 //    private ContextHeaderDocument.ContextHeader contextHeader;
 
     // Keep track of the current path of the message. Before hitting provider its in-path.
@@ -317,4 +324,44 @@ public class JobExecutionContext extends AbstractContext implements Serializable
     public void setCredentialStoreToken(String credentialStoreToken) {
         this.credentialStoreToken = credentialStoreToken;
     }
+
+    public String getWorkingDir() {
+        return workingDir;
+    }
+
+    public void setWorkingDir(String workingDir) {
+        this.workingDir = workingDir;
+    }
+
+    public String getInputDir() {
+        return inputDir;
+    }
+
+    public void setInputDir(String inputDir) {
+        this.inputDir = inputDir;
+    }
+
+    public String getOutputDir() {
+        return outputDir;
+    }
+
+    public void setOutputDir(String outputDir) {
+        this.outputDir = outputDir;
+    }
+
+    public String getStandaredOutput() {
+        return standaredOutput;
+    }
+
+    public void setStandaredOutput(String standaredOutput) {
+        this.standaredOutput = standaredOutput;
+    }
+
+    public String getStandaredError() {
+        return standaredError;
+    }
+
+    public void setStandaredError(String standaredError) {
+        this.standaredError = standaredError;
+    }
 }
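
The new directory fields replace values that previously lived on the application descriptor; AppDescriptorCheckHandler (hunk at the end of this commit) derives them from the gateway's scratch location. A compressed sketch, where scratchLocation is the value taken from the ComputeResourcePreference:

    String workingDir = scratchLocation + File.separator + jobExecutionContext.getExperimentID();
    jobExecutionContext.setWorkingDir(workingDir);
    jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
    jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);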

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
index ca7620d..16c49e6 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cpi/BetterGfacImpl.java
@@ -19,14 +19,7 @@
  *
 */
 package org.apache.airavata.gfac.core.cpi;
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.xpath.XPathExpressionException;
+
 import org.airavata.appcatalog.cpi.AppCatalog;
 import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.exception.AiravataException;
@@ -35,9 +28,6 @@ import org.apache.airavata.common.utils.AiravataZKUtils;
 import org.apache.airavata.common.utils.MonitorPublisher;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.common.utils.listener.AbstractActivityListener;
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
-import org.apache.airavata.commons.gfac.type.HostDescription;
-import org.apache.airavata.commons.gfac.type.ServiceDescription;
 import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.GFacConfiguration;
 import org.apache.airavata.gfac.GFacException;
@@ -57,45 +47,16 @@ import org.apache.airavata.gfac.core.provider.GFacRecoverableProvider;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
-
 import org.apache.airavata.messaging.core.Publisher;
-
 import org.apache.airavata.messaging.core.PublisherFactory;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
 import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
-import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
-import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
-
 import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
-import org.apache.airavata.model.appcatalog.computeresource.JobManagerCommand;
-import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
-import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.ResourceJobManager;
-import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
-import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
-
-import org.apache.airavata.model.appcatalog.computeresource.*;
-
 import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.apache.airavata.model.messaging.event.*;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.registry.cpi.Registry;
 import org.apache.airavata.registry.cpi.RegistryModelType;
-import org.apache.airavata.schemas.gfac.*;
-import org.apache.airavata.schemas.gfac.DataType;
-
-import org.apache.airavata.schemas.gfac.GsisshHostType;
-import org.apache.airavata.schemas.gfac.HostDescriptionType;
-import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.InputParameterType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.OutputParameterType;
-import org.apache.airavata.schemas.gfac.ParameterType;
-import org.apache.airavata.schemas.gfac.ProjectAccountType;
-import org.apache.airavata.schemas.gfac.QueueType;
-import org.apache.airavata.schemas.gfac.SSHHostType;
-import org.apache.airavata.schemas.gfac.ServiceDescriptionType;
-import org.apache.airavata.schemas.gfac.UnicoreHostType;
 import org.apache.zookeeper.*;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
@@ -111,8 +72,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
-//import org.apache.airavata.api.server.listener.ExperimentStatusChangedEvent;
-
 /**
  * This is the GFac CPI class for external usage, this simply have a single method to submit a job to
  * the resource, required data for the job has to be stored in registry prior to invoke this object.
@@ -123,13 +82,8 @@ public class BetterGfacImpl implements GFac,Watcher {
 
     private Registry registry;
 
-//    private AiravataAPI airavataAPI;
-
-//    private AiravataRegistry2 airavataRegistry2;
-
-    private ZooKeeper zk;                       // we are not storing zk instance in to jobExecution context
-
-    private static Integer mutex = new Integer(-1);
+    // we are not storing zk instance in to jobExecution context
+    private ZooKeeper zk;
 
     private static List<ThreadedHandler> daemonHandlers = new ArrayList<ThreadedHandler>();
 
@@ -150,8 +104,6 @@ public class BetterGfacImpl implements GFac,Watcher {
     public BetterGfacImpl(Registry registry, ZooKeeper zooKeeper,
                           MonitorPublisher publisher) {
         this.registry = registry;
-//        this.airavataAPI = airavataAPI;
-//        this.airavataRegistry2 = airavataRegistry2;
         monitorPublisher = publisher;     // This is a EventBus common for gfac
         this.zk = zooKeeper;
     }
@@ -186,10 +138,20 @@ public class BetterGfacImpl implements GFac,Watcher {
 
     public static void startDaemonHandlers() {
         List<GFacHandlerConfig> daemonHandlerConfig = null;
-        URL resource = BetterGfacImpl.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
-        gfacConfigFile = new File(resource.getPath());
+        String className = null;
         try {
+            URL resource = BetterGfacImpl.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
+            if (resource != null) {
+                gfacConfigFile = new File(resource.getPath());
+            }
             daemonHandlerConfig = GFacConfiguration.getDaemonHandlers(gfacConfigFile);
+            for (GFacHandlerConfig handlerConfig : daemonHandlerConfig) {
+                className = handlerConfig.getClassName();
+                Class<?> aClass = Class.forName(className).asSubclass(ThreadedHandler.class);
+                ThreadedHandler threadedHandler = (ThreadedHandler) aClass.newInstance();
+                threadedHandler.initProperties(handlerConfig.getProperties());
+                daemonHandlers.add(threadedHandler);
+            }
         } catch (ParserConfigurationException e) {
             log.error("Error parsing gfac-config.xml, double check the xml configuration", e);
         } catch (IOException e) {
@@ -198,29 +160,18 @@ public class BetterGfacImpl implements GFac,Watcher {
             log.error("Error parsing gfac-config.xml, double check the xml configuration", e);
         } catch (XPathExpressionException e) {
             log.error("Error parsing gfac-config.xml, double check the xml configuration", e);
-        }
-
-        for (GFacHandlerConfig handlerConfig : daemonHandlerConfig) {
-            String className = handlerConfig.getClassName();
-            try {
-                Class<?> aClass = Class.forName(className).asSubclass(ThreadedHandler.class);
-                ThreadedHandler threadedHandler = (ThreadedHandler) aClass.newInstance();
-                threadedHandler.initProperties(handlerConfig.getProperties());
-                daemonHandlers.add(threadedHandler);
-            } catch (ClassNotFoundException e) {
-                log.error("Error initializing the handler: " + className);
-                log.error(className + " class has to implement " + ThreadedHandler.class);
-            } catch (InstantiationException e) {
-                log.error("Error initializing the handler: " + className);
-                log.error(className + " class has to implement " + ThreadedHandler.class);
-            } catch (IllegalAccessException e) {
-                log.error("Error initializing the handler: " + className);
-                log.error(className + " class has to implement " + ThreadedHandler.class);
-            } catch (GFacHandlerException e) {
-                log.error("Error initializing the handler " + className);
-            } catch (GFacException e) {
-                e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-            }
+        } catch (ClassNotFoundException e) {
+            log.error("Error initializing the handler: " + className);
+            log.error(className + " class has to implement " + ThreadedHandler.class);
+        } catch (InstantiationException e) {
+            log.error("Error initializing the handler: " + className);
+            log.error(className + " class has to implement " + ThreadedHandler.class);
+        } catch (GFacHandlerException e) {
+            log.error("Error initializing the handler: " + className);
+            log.error(className + " class has to implement " + ThreadedHandler.class);
+        } catch (IllegalAccessException e) {
+            log.error("Error initializing the handler: " + className);
+            log.error(className + " class has to implement " + ThreadedHandler.class);
         }
         for (ThreadedHandler tHandler : daemonHandlers) {
             (new Thread(tHandler)).start();
@@ -306,173 +257,6 @@ public class BetterGfacImpl implements GFac,Watcher {
                 }
             }
         }
-        //Create the legacy schema docs to fill-in
-        ServiceDescription legacyServiceDescription = new ServiceDescription();
-        ServiceDescriptionType legacyServiceDescType = legacyServiceDescription.getType();
-        ApplicationDescription legacyAppDescription = null;
-        HostDescription legacyHostDescription = null;
-
-        ///////////////SERVICE DESCRIPTOR///////////////////////////////
-        //Fetch the application inputs and outputs from the app interface and create the legacy service description.
-        legacyServiceDescType.setName(applicationInterface.getApplicationName());
-        legacyServiceDescType.setDescription(applicationInterface.getApplicationName());
-        List<InputParameterType> legacyInputParameters = new ArrayList<InputParameterType>();
-        List<OutputParameterType> legacyOutputParameters = new ArrayList<OutputParameterType>();
-        List<InputDataObjectType> applicationInputs = applicationInterface.getApplicationInputs();
-        for (InputDataObjectType dataObjectType : applicationInputs) {
-            InputParameterType parameter = InputParameterType.Factory.newInstance();
-            parameter.setParameterName(dataObjectType.getName());
-            parameter.setParameterDescription(dataObjectType.getUserFriendlyDescription());
-            ParameterType parameterType = parameter.addNewParameterType();
-            switch (dataObjectType.getType()) {
-                case FLOAT:
-                    parameterType.setType(DataType.FLOAT);
-                    break;
-                case INTEGER:
-                    parameterType.setType(DataType.INTEGER);
-                    break;
-                case STRING:
-                    parameterType.setType(DataType.STRING);
-                    break;
-                case URI:
-                    parameterType.setType(DataType.URI);
-                    break;
-            }
-            parameterType.setName(parameterType.getType().toString());
-            parameter.addParameterValue(dataObjectType.getValue());
-            legacyInputParameters.add(parameter);
-        }
-
-        List<OutputDataObjectType> applicationOutputs = applicationInterface.getApplicationOutputs();
-        for (OutputDataObjectType dataObjectType : applicationOutputs) {
-            OutputParameterType parameter = OutputParameterType.Factory.newInstance();
-            parameter.setParameterName(dataObjectType.getName());
-            parameter.setParameterDescription(dataObjectType.getName());
-            ParameterType parameterType = parameter.addNewParameterType();
-            switch (dataObjectType.getType()) {
-                case FLOAT:
-                    parameterType.setType(DataType.FLOAT);
-                    break;
-                case INTEGER:
-                    parameterType.setType(DataType.INTEGER);
-                    break;
-                case STRING:
-                    parameterType.setType(DataType.STRING);
-                    break;
-                case URI:
-                    parameterType.setType(DataType.URI);
-                    break;
-            }
-            parameterType.setName(parameterType.getType().toString());
-            legacyOutputParameters.add(parameter);
-        }
-
-        legacyServiceDescType.setInputParametersArray(legacyInputParameters.toArray(new InputParameterType[]{}));
-        legacyServiceDescType.setOutputParametersArray(legacyOutputParameters.toArray(new OutputParameterType[]{}));
-
-        ////////////////////-----------  HOST DESCRIPTOR  -----------------//////////////////////
-        //Fetch the host description details and fill-in legacy doc
-        ResourceJobManager resourceJobManager = null;
-        for (JobSubmissionInterface jobSubmissionInterface : computeResource.getJobSubmissionInterfaces()) {
-            switch (jobSubmissionInterface.getJobSubmissionProtocol()) {
-                case LOCAL:
-                    legacyHostDescription = new HostDescription();
-                    LOCALSubmission localSubmission =
-                            appCatalog.getComputeResource().getLocalJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
-                    resourceJobManager = localSubmission.getResourceJobManager();
-                    break;
-                case SSH:
-                    SSHJobSubmission sshJobSubmission =
-                            appCatalog.getComputeResource().getSSHJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
-                    resourceJobManager = sshJobSubmission.getResourceJobManager();
-                    switch (sshJobSubmission.getSecurityProtocol()) {
-                        case GSI:
-                            legacyHostDescription = new HostDescription(GsisshHostType.type);
-                            ((GsisshHostType) legacyHostDescription.getType()).setJobManager
-                                    (resourceJobManager.getResourceJobManagerType().name());
-                            ((GsisshHostType) legacyHostDescription.getType()).setInstalledPath(resourceJobManager.getJobManagerBinPath());
-                            // applicationDescription.setInstalledParentPath(resourceJobManager.getJobManagerBinPath());
-                            ((GsisshHostType) legacyHostDescription.getType()).setPort(sshJobSubmission.getSshPort());
-                            break;
-                        case SSH_KEYS:
-                            legacyHostDescription = new HostDescription(SSHHostType.type);
-                            ((SSHHostType) legacyHostDescription.getType()).setHpcResource(true);
-                            break;
-                        default:
-                            legacyHostDescription = new HostDescription(SSHHostType.type);
-                            ((SSHHostType) legacyHostDescription.getType()).setHpcResource(true);
-                            break;
-                    }
-                    break;
-                case UNICORE:
-                	UnicoreJobSubmission ucrSubmission = appCatalog.getComputeResource().getUNICOREJobSubmission(jobSubmissionInterface.getJobSubmissionInterfaceId());
-                	String unicoreEndpoint = ucrSubmission.getUnicoreEndPointURL();
-                	legacyHostDescription = new HostDescription(UnicoreHostType.type);
-                	((UnicoreHostType) legacyHostDescription.getType()).setUnicoreBESEndPointArray(new String[]{unicoreEndpoint});
-                	break;
-                default:
-                    break;
-            }
-        }
-        HostDescriptionType legacyHostDescType = legacyHostDescription.getType();
-        legacyHostDescType.setHostName(computeResource.getHostName());
-        String ipAddress = computeResource.getHostName();
-        if (computeResource.getIpAddresses() != null && computeResource.getIpAddresses().size() > 0) {
-            ipAddress = computeResource.getIpAddresses().iterator().next();
-        } else if (computeResource.getHostAliases() != null && computeResource.getHostAliases().size() > 0) {
-            ipAddress = computeResource.getHostAliases().iterator().next();
-        }
-        legacyHostDescType.setHostAddress(ipAddress);
-
-        /////////////////////---------------- APPLICATION DESCRIPTOR ---------------------/////////////////////////
-        //Fetch deployment information and fill-in legacy doc
-        if ((legacyHostDescType instanceof GsisshHostType) 
-        		|| (legacyHostDescType instanceof SSHHostType) 
-        		|| (legacyHostDescType instanceof UnicoreHostType)) {
-            legacyAppDescription = new ApplicationDescription(HpcApplicationDeploymentType.type);
-            HpcApplicationDeploymentType legacyHPCAppDescType = (HpcApplicationDeploymentType) legacyAppDescription.getType();
-            switch (applicationDeployment.getParallelism()) {
-                case SERIAL:
-                    legacyHPCAppDescType.setJobType(JobTypeType.SERIAL);
-                    break;
-                case MPI:
-                    legacyHPCAppDescType.setJobType(JobTypeType.MPI);
-                    break;
-                case OPENMP:
-                    legacyHPCAppDescType.setJobType(JobTypeType.OPEN_MP);
-                    break;
-                default:
-                    break;
-            }
-            //Fetch scheduling information from experiment request
-            ComputationalResourceScheduling taskSchedule = taskData.getTaskScheduling();
-            QueueType queueType = legacyHPCAppDescType.addNewQueue();
-            queueType.setQueueName(taskSchedule.getQueueName());
-            legacyHPCAppDescType.setCpuCount(taskSchedule.getTotalCPUCount());
-            legacyHPCAppDescType.setNodeCount(taskSchedule.getNodeCount());
-            legacyHPCAppDescType.setMaxWallTime(taskSchedule.getWallTimeLimit());
-            if (resourceJobManager != null) {
-                legacyHPCAppDescType.setInstalledParentPath(resourceJobManager.getJobManagerBinPath());
-                if (resourceJobManager.getJobManagerCommands() != null) {
-                    legacyHPCAppDescType.setJobSubmitterCommand(resourceJobManager.getJobManagerCommands().get(JobManagerCommand.SUBMISSION));
-                }
-            }
-            ProjectAccountType projectAccountType = legacyHPCAppDescType.addNewProjectAccount();
-            if (gatewayResourcePreferences != null) {
-                projectAccountType.setProjectAccountNumber(gatewayResourcePreferences.getAllocationProjectNumber());
-            }
-        } else {
-            legacyAppDescription = new ApplicationDescription();
-        }
-        ApplicationDeploymentDescriptionType legacyAppDescType = legacyAppDescription.getType();
-        legacyAppDescType.addNewApplicationName().setStringValue(applicationInterface.getApplicationName().replaceAll(" ", "_"));
-        legacyAppDescType.setExecutableLocation(applicationDeployment.getExecutablePath());
-        if (gatewayResourcePreferences != null) {
-            legacyAppDescType.setScratchWorkingDirectory(gatewayResourcePreferences.getScratchLocation());
-        } else {
-            legacyAppDescType.setScratchWorkingDirectory("/tmp");
-            log.warn("Missing gateway resource profile for gateway id '" + gatewayID + "'.");
-        }
 
         URL resource = BetterGfacImpl.class.getClassLoader().getResource(org.apache.airavata.common.utils.Constants.GFAC_CONFIG_XML);
         Properties configurationProperties = ServerSettings.getProperties();
@@ -498,19 +282,17 @@ public class BetterGfacImpl implements GFac,Watcher {
         jobExecutionContext.setRegistry(registry);
 
         ApplicationContext applicationContext = new ApplicationContext();
-//        applicationContext.setApplicationDeploymentDescription(applicationDescription);
-        applicationContext.setHostDescription(legacyHostDescription);
-        applicationContext.setServiceDescription(legacyServiceDescription);
-        applicationContext.setApplicationDeploymentDescription(legacyAppDescription);
+        applicationContext.setComputeResourceDescription(computeResource);
+        applicationContext.setApplicationDeploymentDescription(applicationDeployment);
+        applicationContext.setApplicationInterfaceDescription(applicationInterface);
+        applicationContext.setComputeResourcePreference(gatewayResourcePreferences);
         jobExecutionContext.setApplicationContext(applicationContext);
 
         List<DataObjectType> experimentInputs = taskData.getApplicationInputs();
-        jobExecutionContext.setInMessageContext(new MessageContext(GFacUtils.getInMessageContext(experimentInputs,
-                legacyServiceDescType.getInputParametersArray())));
+        jobExecutionContext.setInMessageContext(new MessageContext(GFacUtils.getInMessageContext(experimentInputs)));
 
         List<DataObjectType> outputData = taskData.getApplicationOutputs();
-        jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOutMessageContext(outputData,
-                legacyServiceDescType.getOutputParametersArray())));
+        jobExecutionContext.setOutMessageContext(new MessageContext(GFacUtils.getOutMessageContext(outputData)));
 
         jobExecutionContext.setProperty(Constants.PROP_TOPIC, experimentID);
         jobExecutionContext.setGfac(this);
@@ -1178,14 +960,6 @@ public class BetterGfacImpl implements GFac,Watcher {
         BetterGfacImpl.monitorPublisher = monitorPublisher;
     }
 
-//    public AiravataAPI getAiravataAPI() {
-//        return airavataAPI;
-//    }
-
-//    public AiravataRegistry2 getAiravataRegistry2() {
-//        return airavataRegistry2;
-//    }
-
     public static List<ThreadedHandler> getDaemonHandlers() {
         return daemonHandlers;
     }
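
The startDaemonHandlers refactor above folds the per-handler try/catch into the surrounding block; stripped of diff markers, the loading loop now reads roughly as follows (className is declared outside the loop so the catch blocks can report which handler failed):

    for (GFacHandlerConfig handlerConfig : daemonHandlerConfig) {
        className = handlerConfig.getClassName();
        Class<?> aClass = Class.forName(className).asSubclass(ThreadedHandler.class);
        ThreadedHandler threadedHandler = (ThreadedHandler) aClass.newInstance();
        threadedHandler.initProperties(handlerConfig.getProperties());
        daemonHandlers.add(threadedHandler);
    }

One consequence of the move: an exception while loading one handler now skips the remaining handlers as well, since the whole loop sits inside the shared try block.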

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
index 33c32d3..676a15a 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/handler/AppDescriptorCheckHandler.java
@@ -20,12 +20,12 @@
 */
 package org.apache.airavata.gfac.core.handler;
 
-import org.apache.airavata.commons.gfac.type.ApplicationDescription;
 import org.apache.airavata.gfac.Constants;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
-import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
+import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
+import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,47 +43,34 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
             logger.info("Error saving plugin status to ZK");
         }
         StringBuffer data = new StringBuffer();
-        ApplicationDescription app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType appDesc = app.getType();
+        ApplicationInterfaceDescription appInterface = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription();
+        ComputeResourcePreference computeResourcePreference = jobExecutionContext.getApplicationContext().getComputeResourcePreference();
 
-        if (appDesc.getScratchWorkingDirectory() == null) {
-            appDesc.setScratchWorkingDirectory("/tmp");
+        if (computeResourcePreference.getScratchLocation() == null) {
+            computeResourcePreference.setScratchLocation("/tmp");
         }
         /*
         * Working dir
         */
-        if (appDesc.getStaticWorkingDirectory() == null || "null".equals(appDesc.getStaticWorkingDirectory())) {
-            String tmpDir = appDesc.getScratchWorkingDirectory() + File.separator
-                    + jobExecutionContext.getExperimentID();
 
-            appDesc.setStaticWorkingDirectory(tmpDir);
-        }
-        data.append(appDesc.getScratchWorkingDirectory());
-        data.append(",").append(appDesc.getStaticWorkingDirectory());
-        //FIXME: Move this input/output to application descrpitor
+        String workingDir = computeResourcePreference.getScratchLocation() + File.separator+ jobExecutionContext.getExperimentID();
+        jobExecutionContext.setWorkingDir(workingDir);
+        data.append(computeResourcePreference.getScratchLocation());
+        data.append(",").append(jobExecutionContext.getWorkingDir());
+
         /*
         * Input and Output Directory
         */
-        if (appDesc.getInputDataDirectory() == null || "".equals(appDesc.getInputDataDirectory())) {
-            appDesc.setInputDataDirectory(appDesc.getStaticWorkingDirectory() + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME);
-        }
-        if (appDesc.getOutputDataDirectory() == null || "".equals(appDesc.getOutputDataDirectory())) {
-            appDesc.setOutputDataDirectory(appDesc.getStaticWorkingDirectory() + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
-        }
+        jobExecutionContext.setInputDir(workingDir + File.separator + Constants.INPUT_DATA_DIR_VAR_NAME );
+        jobExecutionContext.setOutputDir(workingDir + File.separator + Constants.OUTPUT_DATA_DIR_VAR_NAME);
+        data.append(",").append(jobExecutionContext.getInputDir()).append(",").append(jobExecutionContext.getOutputDir());
 
-        data.append(",").append(appDesc.getInputDataDirectory()).append(",").append(appDesc.getOutputDataDirectory());
         /*
         * Stdout and Stderr for Shell
         */
-        if (appDesc.getStandardOutput() == null || "".equals(appDesc.getStandardOutput())) {
-            appDesc.setStandardOutput(appDesc.getStaticWorkingDirectory() + File.separator
-                    + appDesc.getApplicationName().getStringValue().replaceAll("\\s+","") + ".stdout");
-        }
-        if (appDesc.getStandardError() == null || "".equals(appDesc.getStandardError())) {
-            appDesc.setStandardError(appDesc.getStaticWorkingDirectory() + File.separator
-                    + appDesc.getApplicationName().getStringValue().replaceAll("\\s+","") + ".stderr");
-        }
-        data.append(",").append(appDesc.getStandardOutput()).append(",").append(appDesc.getStandardError());
+        jobExecutionContext.setStandaredOutput(workingDir + File.separator + appInterface.getApplicationName().replaceAll("\\s+", "") + ".stdout");
+        jobExecutionContext.setStandaredError(workingDir + File.separator + appInterface.getApplicationName().replaceAll("\\s+", "") + ".stderr");
+        data.append(",").append(jobExecutionContext.getStandaredOutput()).append(",").append(jobExecutionContext.getStandaredError());
 
 
         logger.info("Recoverable data is saving to zk: " + data.toString());
@@ -97,17 +84,15 @@ public class AppDescriptorCheckHandler implements GFacRecoverableHandler {
     }
 
     public void recover(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        ApplicationDescription app = jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription();
-        ApplicationDeploymentDescriptionType appDesc = app.getType();
         try {
             String s = GFacUtils.getPluginData(jobExecutionContext, this.getClass().getName());
             String[] split = s.split(",");                   // this is ugly code but nobody else is saving or reading this data, so this is the fastest way
-            appDesc.setScratchWorkingDirectory(split[0]);
-            appDesc.setStaticWorkingDirectory(split[1]);
-            appDesc.setInputDataDirectory(split[2]);
-            appDesc.setOutputDataDirectory(split[3]);
-            appDesc.setStandardOutput(split[4]);
-            appDesc.setStandardError(split[5]);
+            jobExecutionContext.getApplicationContext().getComputeResourcePreference().setScratchLocation(split[0]);
+            jobExecutionContext.setWorkingDir(split[1]);
+            jobExecutionContext.setInputDir(split[2]);
+            jobExecutionContext.setOutputDir(split[3]);
+            jobExecutionContext.setStandaredOutput(split[4]);
+            jobExecutionContext.setStandaredError(split[5]);
         } catch (Exception e) {
             throw new GFacHandlerException(e);
         }
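
Note that invoke() and recover() now share an implicit contract: the ZooKeeper plugin data is a single comma-separated string whose field order must stay in sync on both sides (scratch location, working dir, input dir, output dir, stdout, stderr). A small round-trip sketch with hypothetical helper names (not part of the patch):

    // Hypothetical helpers illustrating the recovery payload used above.
    // Field order is the contract: scratch, workingDir, inputDir, outputDir, stdout, stderr.
    static String buildRecoveryData(JobExecutionContext ctx, String scratchLocation) {
        StringBuilder data = new StringBuilder(scratchLocation);
        data.append(",").append(ctx.getWorkingDir());
        data.append(",").append(ctx.getInputDir()).append(",").append(ctx.getOutputDir());
        data.append(",").append(ctx.getStandaredOutput()).append(",").append(ctx.getStandaredError());
        return data.toString();
    }

    static void applyRecoveryData(JobExecutionContext ctx, String data) {
        String[] split = data.split(",");
        ctx.getApplicationContext().getComputeResourcePreference().setScratchLocation(split[0]);
        ctx.setWorkingDir(split[1]);
        ctx.setInputDir(split[2]);
        ctx.setOutputDir(split[3]);
        ctx.setStandaredOutput(split[4]);   // setter names as spelled in the patch
        ctx.setStandaredError(split[5]);
    }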

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/provider/utils/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/provider/utils/ProviderUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/provider/utils/ProviderUtils.java
index c98da92..dc8eb1c 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/provider/utils/ProviderUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/provider/utils/ProviderUtils.java
@@ -21,33 +21,23 @@
 
 package org.apache.airavata.gfac.core.provider.utils;
 
-import org.apache.airavata.commons.gfac.type.ActualParameter;
-import org.apache.airavata.commons.gfac.type.MappingFactory;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
-import org.apache.airavata.schemas.gfac.InputParameterType;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 public class ProviderUtils {
 
     public static List<String> getInputParameters(JobExecutionContext jobExecutionContext) throws GFacProviderException {
         List<String> parameters = new ArrayList<String>();
         MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
-        InputParameterType[] inputParamDefinitionArray = jobExecutionContext.getApplicationContext().
-                getServiceDescription().getType().getInputParametersArray();
-        for (InputParameterType inputParam : inputParamDefinitionArray) {
-            String parameterName = inputParam.getParameterName();
-            ActualParameter parameter = (ActualParameter)inMessageContext.getParameter(parameterName);
-            if(parameter == null){
-                throw new GFacProviderException("Cannot find required input parameter " + parameterName + ".");
-            }
-
-            parameters.add(MappingFactory.toString(parameter));
+        Map<String, Object> inputs = inMessageContext.getParameters();
+        for (String inputParam : inputs.keySet()) {
+            parameters.add(inputParam);
         }
-
         return parameters;
     }
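
One behavioural difference worth flagging: the old loop returned the stringified parameter values (via MappingFactory.toString), while the rewritten method returns the parameter names, i.e. the key set of the in-message context. A caller that wants values has to fetch them itself; a hypothetical sketch (the caller and its output are illustrative only):

    // Hypothetical caller: names come from getInputParameters(), the actual
    // entries are looked up in the MessageContext by name.
    List<String> names = ProviderUtils.getInputParameters(jobExecutionContext);
    MessageContext in = jobExecutionContext.getInMessageContext();
    for (String name : names) {
        Object value = in.getParameter(name);   // a DataObjectType after this patch series
        System.out.println("input " + name + " -> " + value);
    }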
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/8abe8dca/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index eef44a4..ce74e4e 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -819,6 +819,14 @@ public class GFacUtils {
 		return stringObjectHashMap;
 	}
 
+    public static Map<String, Object> getInMessageContext(List<DataObjectType> experimentData) throws GFacException {
+        Map<String, Object> map = new HashMap<String, Object>();
+        for (DataObjectType objectType : experimentData) {
+            map.put(objectType.getKey(), objectType);
+        }
+        return map;
+    }
+
 	public static Map<String, Object> getOutMessageContext(
 			List<DataObjectType> experimentData, Parameter[] parameters)
 			throws GFacException {
@@ -854,6 +862,14 @@ public class GFacUtils {
 		return stringObjectHashMap;
 	}
 
+    public static Map<String, Object> getOutMessageContext(List<DataObjectType> experimentData) throws GFacException {
+        Map<String, Object> map = new HashMap<String, Object>();
+        for (DataObjectType objectType : experimentData) {
+            map.put(objectType.getKey(), objectType);
+        }
+        return map;
+    }
+
 	public static GfacExperimentState getZKExperimentState(ZooKeeper zk,
 			JobExecutionContext jobExecutionContext)
 			throws ApplicationSettingsException, KeeperException,


[16/50] [abbrv] airavata git commit: adding BES provider changes

Posted by ch...@apache.org.
adding BES provider changes


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/3f953e02
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/3f953e02
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/3f953e02

Branch: refs/heads/master
Commit: 3f953e026ab6a5341fe762f6a98fc6807d67ca29
Parents: eb626fa
Author: chathuriw <ka...@gmail.com>
Authored: Fri Oct 31 14:40:50 2014 -0400
Committer: Chathuri Wimalasena <ka...@gmail.com>
Committed: Wed Nov 5 11:23:05 2014 -0500

----------------------------------------------------------------------
 .../gfac/bes/handlers/AbstractSMSHandler.java   |  74 ++--
 .../gfac/bes/provider/impl/BESProvider.java     | 378 +++++++++----------
 .../bes/security/UNICORESecurityContext.java    |   4 +-
 .../gfac/bes/utils/ApplicationProcessor.java    | 212 ++++-------
 .../airavata/gfac/core/utils/GFacUtils.java     |  23 +-
 .../apache/airavata/gfac/ec2/EC2Provider.java   |  15 +-
 6 files changed, 306 insertions(+), 400 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
index 8f6fcf4..71ca0db 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/handlers/AbstractSMSHandler.java
@@ -2,6 +2,7 @@ package org.apache.airavata.gfac.bes.handlers;
 
 import java.util.Properties;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.bes.security.UNICORESecurityContext;
 import org.apache.airavata.gfac.bes.security.X509SecurityContext;
@@ -13,6 +14,7 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.model.appcatalog.computeresource.*;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
 import org.apache.airavata.schemas.gfac.JobDirectoryModeDocument.JobDirectoryMode;
@@ -43,42 +45,42 @@ public abstract class AbstractSMSHandler implements BESConstants, GFacHandler{
 	@Override
 	public void invoke(JobExecutionContext jobExecutionContext)
 			throws GFacHandlerException {
-		
-		// if not SMS then not to pass further
-//		if(!isSMSEnabled(jobExecutionContext)) return;
-		
-		initSecurityProperties(jobExecutionContext);
-		
+		try {
+            initSecurityProperties(jobExecutionContext);
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                    UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                    factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
+            storageClient = null;
 
-		
-		UnicoreHostType host = (UnicoreHostType) jobExecutionContext.getApplicationContext().getHostDescription()
-                .getType();
-        String factoryUrl = host.getUnicoreBESEndPointArray()[0];
-        
-        storageClient = null;
-        
-        if(!isSMSInstanceExisting(jobExecutionContext)) {
-            EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
-            eprt.addNewAddress().setStringValue(factoryUrl);
-            StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
-            try {
-                storageClient = storageCreator.createStorage();
-            } catch (Exception e2) {
-                log.error("Cannot create storage..");
-                throw new GFacHandlerException("Cannot create storage..", e2);
+            if (!isSMSInstanceExisting(jobExecutionContext)) {
+                EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
+                eprt.addNewAddress().setStringValue(factoryUrl);
+                StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
+                try {
+                    storageClient = storageCreator.createStorage();
+                } catch (Exception e2) {
+                    log.error("Cannot create storage..");
+                    throw new GFacHandlerException("Cannot create storage..", e2);
+                }
+                jobExecutionContext.setProperty(PROP_SMS_EPR, storageClient.getEPR());
+            } else {
+                EndpointReferenceType eprt = (EndpointReferenceType) jobExecutionContext.getProperty(PROP_SMS_EPR);
+                try {
+                    storageClient = new StorageClient(eprt, secProperties);
+                } catch (Exception e) {
+                    throw new GFacHandlerException("Cannot create storage..", e);
+                }
             }
-            jobExecutionContext.setProperty(PROP_SMS_EPR, storageClient.getEPR());
-        }
-        else {
-        	EndpointReferenceType eprt = (EndpointReferenceType)jobExecutionContext.getProperty(PROP_SMS_EPR);
-        		try {
-					storageClient = new StorageClient(eprt, secProperties);
-				} catch (Exception e) {
-					throw new GFacHandlerException("Cannot create storage..", e);
-				}
+            dataTransferrer = new DataTransferrer(jobExecutionContext, storageClient);
+        } catch (AppCatalogException e) {
+            throw new GFacHandlerException("Error occurred while retrieving unicore job submission interface..", e);
         }
-        dataTransferrer = new DataTransferrer(jobExecutionContext, storageClient);
-	}
+    }
 	
 	protected void initSecurityProperties(JobExecutionContext jobExecutionContext) throws GFacHandlerException{
 		log.debug("Initializing SMSInHandler security properties ..");
@@ -136,9 +138,9 @@ public abstract class AbstractSMSHandler implements BESConstants, GFacHandler{
 	 * of the job execution context.
 	 * */
 	protected boolean isSMSEnabled(JobExecutionContext jobExecutionContext){
-		if(((UnicoreHostType)jobExecutionContext.getApplicationContext().getHostDescription().getType()).getJobDirectoryMode() == JobDirectoryMode.SMS_BYTE_IO) {
-			return true;
-		}
+//		if(((UnicoreHostType)jobExecutionContext.getApplicationContext().getHostDescription().getType()).getJobDirectoryMode() == JobDirectoryMode.SMS_BYTE_IO) {
+//			return true;
+//		}
 		return false;
 	}
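
The handler now resolves the BES endpoint through the app catalog rather than the UnicoreHostType schema. One thing the hunk leaves implicit: factoryUrl stays null if the preferred protocol is not UNICORE, and it is then handed to EndpointReferenceType as-is. A hedged sketch of the lookup with an explicit guard (the guard is added for illustration, not committed behaviour; AppCatalogException handling omitted):

    // Sketch: resolving the UNICORE factory URL from the preferred job submission interface.
    JobSubmissionInterface jsi = jobExecutionContext.getPreferredJobSubmissionInterface();
    String factoryUrl = null;
    if (jsi.getJobSubmissionProtocol() == JobSubmissionProtocol.UNICORE) {
        UnicoreJobSubmission unicore = GFacUtils.getUnicoreJobSubmission(jsi.getJobSubmissionInterfaceId());
        factoryUrl = unicore.getUnicoreEndPointURL();
    }
    if (factoryUrl == null) {
        // illustrative guard, not in the patch: fail fast instead of passing null downstream
        throw new GFacHandlerException("No UNICORE job submission interface found for this compute resource");
    }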
 	

http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
index 7ed038a..398f05c 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/provider/impl/BESProvider.java
@@ -23,6 +23,7 @@ package org.apache.airavata.gfac.bes.provider.impl;
 import java.util.Calendar;
 import java.util.Map;
 
+import org.airavata.appcatalog.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.GFacException;
 import org.apache.airavata.gfac.bes.security.UNICORESecurityContext;
@@ -40,6 +41,9 @@ import org.apache.airavata.gfac.core.provider.AbstractProvider;
 import org.apache.airavata.gfac.core.provider.GFacProvider;
 import org.apache.airavata.gfac.core.provider.GFacProviderException;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
+import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
+import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.JobDetails;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.UnicoreHostType;
@@ -101,209 +105,165 @@ public class BESProvider extends AbstractProvider implements GFacProvider,
 
 	public void execute(JobExecutionContext jobExecutionContext)
 			throws GFacProviderException, GFacException {
-		UnicoreHostType host = (UnicoreHostType) jobExecutionContext
-				.getApplicationContext().getHostDescription().getType();
-
-		String factoryUrl = host.getUnicoreBESEndPointArray()[0];
-
-		EndpointReferenceType eprt = EndpointReferenceType.Factory
-				.newInstance();
-		eprt.addNewAddress().setStringValue(factoryUrl);
-
-		// WSUtilities.addServerIdentity(eprt, serverDN);
-
-		String userDN = getUserName(jobExecutionContext);
-
-		// TODO: to be removed
-		if (userDN == null || userDN.equalsIgnoreCase("admin")) {
-			userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
-		}
-
-		StorageClient sc = null;
-
-		try {
-
-			CreateActivityDocument cad = CreateActivityDocument.Factory
-					.newInstance();
-			JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
-					.newInstance();
-
-//			String xlogin = getCNFromUserDN(userDN);
-
-			// create storage
-			StorageCreator storageCreator = new StorageCreator(secProperties,
-					factoryUrl, 5, null);
-
-			try {
-				sc = storageCreator.createStorage();
-			} catch (Exception e2) {
-				log.error("Cannot create storage..");
-				throw new GFacProviderException("Cannot create storage..", e2);
-			}
-
-			JobDefinitionType jobDefinition = jobDefDoc.addNewJobDefinition();
-			try {
-				jobDefinition = JSDLGenerator.buildJSDLInstance(
-						jobExecutionContext, sc.getUrl()).getJobDefinition();
-				cad.addNewCreateActivity().addNewActivityDocument()
-						.setJobDefinition(jobDefinition);
-				log.info("JSDL" + jobDefDoc.toString());
-			} catch (Exception e1) {
-				throw new GFacProviderException(
-						"Cannot generate JSDL instance from the JobExecutionContext.",
-						e1);
-			}
-
-			// upload files if any
-			DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
-			dt.uploadLocalFiles();
-
-			FactoryClient factory = null;
-			JobDetails jobDetails = new JobDetails();
-
-			try {
-				factory = new FactoryClient(eprt, secProperties);
-			} catch (Exception e) {
-				throw new GFacProviderException(e.getLocalizedMessage(), e);
-			}
-			CreateActivityResponseDocument response = null;
-			try {
-				log.info(String.format("Activity Submitting to %s ... \n",
-						factoryUrl));
-				jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
-				response = factory.createActivity(cad);
-				log.info(String.format("Activity Submitted to %s \n", factoryUrl));
-			} catch (Exception e) {
-				throw new GFacProviderException("Cannot create activity.", e);
-			}
-			EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
-
-			log.info("Activity : " + activityEpr.getAddress().getStringValue()	+ " Submitted.");
-
-			// factory.waitWhileActivityIsDone(activityEpr, 1000);
-			jobId = WSUtilities.extractResourceID(activityEpr);
-			if (jobId == null) {
-				jobId = new Long(Calendar.getInstance().getTimeInMillis())
-						.toString();
-			}
-			log.info("JobID: " + jobId);
-			jobDetails.setJobID(activityEpr.toString());
-			jobDetails.setJobDescription(activityEpr.toString());
-
-			jobExecutionContext.setJobDetails(jobDetails);
-			try {
-			log.info(formatStatusMessage(activityEpr.getAddress()
-					.getStringValue(), factory.getActivityStatus(activityEpr)
-					.toString()));
-
-			jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
-			GFacUtils.saveJobStatus(jobExecutionContext, details,JobState.SUBMITTED);
-
-			factory.getActivityStatus(activityEpr);
-			log.info(formatStatusMessage(activityEpr.getAddress()
-					.getStringValue(), factory.getActivityStatus(activityEpr)
-					.toString()));
-
-			// TODO publish the status messages to the message bus
-			while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
-					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
-					&& (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
-
-				ActivityStatusType activityStatus = null;
-				try {
-					activityStatus = getStatus(factory, activityEpr);
-					JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
-					String jobStatusMessage = "Status of job " + jobId + "is "
-							+ applicationJobStatus;
-					GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
-							applicationJobStatus);
-
-					jobExecutionContext.getNotifier().publish(
-							new StatusChangeEvent(jobStatusMessage));
-
-					// GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
-					// applicationJobStatus);
-				} catch (UnknownActivityIdentifierFault e) {
-					throw new GFacProviderException(e.getMessage(),
-							e.getCause());
-				}
-
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				continue;
-			}
-			}catch(Exception e) {
-				throw new GFacProviderException(e.getMessage(),
-						e.getCause());
-				
-			}
-			
-			ActivityStatusType activityStatus = null;
-			try {
-				activityStatus = getStatus(factory, activityEpr);
-				log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
-				ActivityClient activityClient;
-				activityClient = new ActivityClient(activityEpr,secProperties);
-				dt.setStorageClient(activityClient.getUspaceClient());
-			} catch (Exception e1) {
-				throw new GFacProviderException(e1.getMessage(),
-						e1.getCause());
-			}
-
-			
-
-			if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
-				String error = activityStatus.getFault().getFaultcode()
-						.getLocalPart()
-						+ "\n"
-						+ activityStatus.getFault().getFaultstring()
-						+ "\n EXITCODE: " + activityStatus.getExitCode();
-				log.info(error);
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				dt.downloadStdOuts();
-			} else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
-				JobState applicationJobStatus = JobState.CANCELED;
-				String jobStatusMessage = "Status of job " + jobId + "is "
-						+ applicationJobStatus;
-				jobExecutionContext.getNotifier().publish(
-						new StatusChangeEvent(jobStatusMessage));
-				GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
-						applicationJobStatus);
-				throw new GFacProviderException(
-						jobExecutionContext.getExperimentID() + "Job Canceled");
-			}
-
-			else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
-				try {
-					Thread.sleep(5000);
-				} catch (InterruptedException e) {
-				}
-				if (activityStatus.getExitCode() == 0) {
-					dt.downloadRemoteFiles();
-				} else {
-					dt.downloadStdOuts();
-				}
-			}
-
-		} finally {
-			// destroy sms instance
-			try {
-				if (sc != null) {
-					sc.destroy();
-				}
-			} catch (Exception e) {
-				log.warn(
-						"Cannot destroy temporary SMS instance:" + sc.getUrl(),
-						e);
-			}
-		}
-
-	}
+        StorageClient sc = null;
+        try {
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
+            EndpointReferenceType eprt = EndpointReferenceType.Factory
+                    .newInstance();
+            eprt.addNewAddress().setStringValue(factoryUrl);
+            String userDN = getUserName(jobExecutionContext);
+
+            // TODO: to be removed
+            if (userDN == null || userDN.equalsIgnoreCase("admin")) {
+                userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
+            }
+            CreateActivityDocument cad = CreateActivityDocument.Factory
+                    .newInstance();
+            JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
+                    .newInstance();
+
+            // create storage
+            StorageCreator storageCreator = new StorageCreator(secProperties,
+                    factoryUrl, 5, null);
+            sc = storageCreator.createStorage();
+
+            JobDefinitionType jobDefinition = JSDLGenerator.buildJSDLInstance(
+                    jobExecutionContext, sc.getUrl()).getJobDefinition();
+            cad.addNewCreateActivity().addNewActivityDocument()
+                    .setJobDefinition(jobDefinition);
+            log.info("JSDL" + jobDefDoc.toString());
+
+            // upload files if any
+            DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
+            dt.uploadLocalFiles();
+
+            JobDetails jobDetails = new JobDetails();
+            FactoryClient factory = new FactoryClient(eprt, secProperties);
+
+            log.info(String.format("Activity Submitting to %s ... \n",
+                    factoryUrl));
+            jobExecutionContext.getNotifier().publish(new StartExecutionEvent());
+            CreateActivityResponseDocument response = factory.createActivity(cad);
+            log.info(String.format("Activity Submitted to %s \n", factoryUrl));
+
+            EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();
+
+            log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");
+
+            // factory.waitWhileActivityIsDone(activityEpr, 1000);
+            jobId = WSUtilities.extractResourceID(activityEpr);
+            if (jobId == null) {
+                jobId = new Long(Calendar.getInstance().getTimeInMillis())
+                        .toString();
+            }
+            log.info("JobID: " + jobId);
+            jobDetails.setJobID(activityEpr.toString());
+            jobDetails.setJobDescription(activityEpr.toString());
+
+            jobExecutionContext.setJobDetails(jobDetails);
+            log.info(formatStatusMessage(activityEpr.getAddress()
+                    .getStringValue(), factory.getActivityStatus(activityEpr)
+                    .toString()));
+
+            jobExecutionContext.getNotifier().publish(new UnicoreJobIDEvent(jobId));
+            GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SUBMITTED);
+
+            factory.getActivityStatus(activityEpr);
+            log.info(formatStatusMessage(activityEpr.getAddress()
+                    .getStringValue(), factory.getActivityStatus(activityEpr)
+                    .toString()));
+
+            // TODO publish the status messages to the message bus
+            while ((factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED)
+                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED)
+                    && (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED)) {
+
+                ActivityStatusType activityStatus = getStatus(factory, activityEpr);
+                JobState applicationJobStatus = getApplicationJobStatus(activityStatus);
+                String jobStatusMessage = "Status of job " + jobId + "is "
+                        + applicationJobStatus;
+                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
+                        applicationJobStatus);
+
+                jobExecutionContext.getNotifier().publish(
+                        new StatusChangeEvent(jobStatusMessage));
+
+                // GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
+                // applicationJobStatus);
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                continue;
+            }
+
+            ActivityStatusType activityStatus = null;
+            activityStatus = getStatus(factory, activityEpr);
+            log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(), activityStatus.getState().toString()));
+            ActivityClient activityClient;
+            activityClient = new ActivityClient(activityEpr, secProperties);
+            dt.setStorageClient(activityClient.getUspaceClient());
+
+            if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
+                String error = activityStatus.getFault().getFaultcode()
+                        .getLocalPart()
+                        + "\n"
+                        + activityStatus.getFault().getFaultstring()
+                        + "\n EXITCODE: " + activityStatus.getExitCode();
+                log.info(error);
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                dt.downloadStdOuts();
+            } else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
+                JobState applicationJobStatus = JobState.CANCELED;
+                String jobStatusMessage = "Status of job " + jobId + "is "
+                        + applicationJobStatus;
+                jobExecutionContext.getNotifier().publish(
+                        new StatusChangeEvent(jobStatusMessage));
+                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails,
+                        applicationJobStatus);
+                throw new GFacProviderException(
+                        jobExecutionContext.getExperimentID() + "Job Canceled");
+            } else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                }
+                if (activityStatus.getExitCode() == 0) {
+                    dt.downloadRemoteFiles();
+                } else {
+                    dt.downloadStdOuts();
+                }
+            }
+        } catch (AppCatalogException e) {
+            log.error("Error while retrieving UNICORE job submission..");
+            throw new GFacProviderException("Error while retrieving UNICORE job submission..", e);
+        } catch (Exception e) {
+            log.error("Cannot create storage..");
+            throw new GFacProviderException("Cannot create storage..", e);
+        } finally {
+            // destroy sms instance
+            try {
+                if (sc != null) {
+                    sc.destroy();
+                }
+            } catch (Exception e) {
+                log.warn(
+                        "Cannot destroy temporary SMS instance:" + sc.getUrl(),
+                        e);
+            }
+        }
+
+    }
 
 	private JobState getApplicationJobStatus(ActivityStatusType activityStatus) {
 		if (activityStatus == null) {
@@ -368,10 +328,14 @@ public class BESProvider extends AbstractProvider implements GFacProvider,
 			// initSecurityProperties(jobExecutionContext);
 			EndpointReferenceType eprt = EndpointReferenceType.Factory
 					.parse(activityEpr);
-			UnicoreHostType host = (UnicoreHostType) jobExecutionContext
-					.getApplicationContext().getHostDescription().getType();
-
-			String factoryUrl = host.getUnicoreBESEndPointArray()[0];
+            JobSubmissionInterface preferredJobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
+            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
+            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
+            String factoryUrl = null;
+            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
+                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
+                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
+            }
 			EndpointReferenceType epr = EndpointReferenceType.Factory
 					.newInstance();
 			epr.addNewAddress().setStringValue(factoryUrl);
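
The refactored execute() folds the nested try/catch blocks into a single one and polls the activity status every five seconds until it reaches FINISHED, FAILED, or CANCELLED. A condensed sketch of that loop (exception handling from the enclosing try/catch omitted); the interrupt handling is an assumption of this sketch, since the committed code swallows InterruptedException:

    // Sketch of the BES status polling loop used above, condensed.
    while (factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FINISHED
            && factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.FAILED
            && factory.getActivityStatus(activityEpr) != ActivityStateEnumeration.CANCELLED) {
        JobState jobState = getApplicationJobStatus(getStatus(factory, activityEpr));
        GFacUtils.updateJobStatus(jobExecutionContext, jobDetails, jobState);
        jobExecutionContext.getNotifier().publish(
                new StatusChangeEvent("Status of job " + jobId + " is " + jobState));
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();   // assumption: propagate rather than ignore, unlike the patch
            break;
        }
    }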

http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
index 7285c2c..855335f 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/security/UNICORESecurityContext.java
@@ -38,7 +38,7 @@ public class UNICORESecurityContext extends X509SecurityContext {
 	 * @return an instance of the default client configuration
 	 * @throws GFacException
 	 * @throws ApplicationSettingsException 
-	 * @throws GFacProviderException
+	 * @throws GFacException, ApplicationSettingsException
 	 */
 	public DefaultClientConfiguration getDefaultConfiguration() throws GFacException, ApplicationSettingsException {
 		try{
@@ -69,7 +69,7 @@ public class UNICORESecurityContext extends X509SecurityContext {
 	 * @param caKeyPath
 	 * @param caKeyPwd
 	 * @return
-	 * @throws GFacProviderException
+	 * @throws GFacException
 	 */
 	public DefaultClientConfiguration getServerSignedConfiguration(String userID, String userDN, String caCertPath, String caKeyPath, String caKeyPwd) throws GFacException {
 		try {

http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
index d624340..ee58565 100644
--- a/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
+++ b/modules/gfac/gfac-bes/src/main/java/org/apache/airavata/gfac/bes/utils/ApplicationProcessor.java
@@ -22,21 +22,18 @@
 package org.apache.airavata.gfac.bes.utils;
 
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
+import org.apache.airavata.model.appcatalog.appdeployment.ApplicationParallelismType;
 import org.apache.airavata.schemas.gfac.ExtendedKeyValueType;
 import org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType;
-import org.apache.airavata.schemas.gfac.JobTypeType;
-import org.apache.airavata.schemas.gfac.NameValuePairType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdl.ApplicationType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
-import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.EnvironmentType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.FileNameType;
 import org.ggf.schemas.jsdl.x2005.x11.jsdlPosix.UserNameType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.NumberOfProcessesType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ProcessesPerHostType;
 import org.ogf.schemas.jsdl.x2007.x02.jsdlSpmd.ThreadsPerProcessType;
 
-import java.io.File;
-
 
 public class ApplicationProcessor {
 	
@@ -47,40 +44,50 @@ public class ApplicationProcessor {
 			userName = "CN=zdv575, O=Ultrascan Gateway, C=DE";
 		}
 		
-		HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
-				.getApplicationContext().getApplicationDeploymentDescription()
-				.getType();
-		
-		createGenericApplication(value, appDepType);
-		
-		if (appDepType.getApplicationEnvironmentArray().length > 0) {
-			createApplicationEnvironment(value,
-					appDepType.getApplicationEnvironmentArray(), appDepType);
-		}
+		ApplicationDeploymentDescription appDep= context.getApplicationContext().getApplicationDeploymentDescription();
+        String appname = context.getApplicationContext().getApplicationInterfaceDescription().getApplicationName();
+        ApplicationParallelismType parallelism = appDep.getParallelism();
 
-		
-		if (appDepType.getExecutableLocation() != null) {
+        ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
+        appType.setApplicationName(appname);
+        JSDLUtils.getOrCreateJobIdentification(value).setJobName(appname);
+
+//		if (appDep.getSetEnvironment().size() > 0) {
+//            createApplicationEnvironment(value, appDep.getSetEnvironment(), parallelism);
+//		}
+//
+        String stdout = context.getStandardOutput();
+        String stderr = context.getStandardError();
+        if (appDep.getExecutablePath() != null) {
 			FileNameType fNameType = FileNameType.Factory.newInstance();
-			fNameType.setStringValue(appDepType.getExecutableLocation());
-			if(isParallelJob(appDepType)) {
+			fNameType.setStringValue(appDep.getExecutablePath());
+			if(parallelism.equals(ApplicationParallelismType.MPI) || parallelism.equals(ApplicationParallelismType.OPENMP_MPI)) {
 				JSDLUtils.getOrCreateSPMDApplication(value).setExecutable(fNameType);
-				JSDLUtils.getSPMDApplication(value).setSPMDVariation(getSPMDVariation(appDepType));
-				
-				if(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES)!=null){
+                if (parallelism.equals(ApplicationParallelismType.OPENMP_MPI)){
+                    JSDLUtils.getSPMDApplication(value).setSPMDVariation(SPMDVariations.OpenMPI.value());
+                }else if (parallelism.equals(ApplicationParallelismType.MPI)){
+                    JSDLUtils.getSPMDApplication(value).setSPMDVariation(SPMDVariations.MPI.value());
+                }
+
+                int totalCPUCount = context.getTaskData().getTaskScheduling().getTotalCPUCount();
+                if(totalCPUCount > 0){
 					NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
-					num.setStringValue(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES));
+                    num.setStringValue(String.valueOf(totalCPUCount));
 					JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
 				}
-							
-				if(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST)!=null){
-					ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
-					pph.setStringValue(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST));
-					JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
-				}
-				
-				if(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST)!=null){
+
+                int totalNodeCount = context.getTaskData().getTaskScheduling().getNodeCount();
+                if(totalNodeCount > 0){
+                    int ppn = totalCPUCount / totalNodeCount;
+                    ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
+                    pph.setStringValue(String.valueOf(ppn));
+                    JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
+                }
+
+                int totalThreadCount = context.getTaskData().getTaskScheduling().getNumberOfThreads();
+                if(totalThreadCount > 0){
 					ThreadsPerProcessType tpp = ThreadsPerProcessType.Factory.newInstance();
-					tpp.setStringValue(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST));
+					tpp.setStringValue(String.valueOf(totalThreadCount));
 					JSDLUtils.getSPMDApplication(value).setThreadsPerProcess(tpp);
 					
 				}
@@ -90,6 +97,18 @@ public class ApplicationProcessor {
 					userNameType.setStringValue(userName);
 					JSDLUtils.getSPMDApplication(value).setUserName(userNameType);
 				}
+                if (stdout != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stdout);
+                    JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
+                }
+                if (stderr != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stderr);
+                    JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
+                }
+
+
 			}
 			else {
 				JSDLUtils.getOrCreatePOSIXApplication(value).setExecutable(fNameType);
@@ -98,17 +117,18 @@ public class ApplicationProcessor {
 					userNameType.setStringValue(userName);
 					JSDLUtils.getOrCreatePOSIXApplication(value).setUserName(userNameType);
 				}
+                if (stdout != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stdout);
+                    JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
+                }
+                if (stderr != null){
+                    FileNameType fName = FileNameType.Factory.newInstance();
+                    fName.setStringValue(stderr);
+                    JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
+                }
 			}
 		}
-		
-
-		String stdout = (appDepType.getStandardOutput() != null) ? new File(appDepType.getStandardOutput()).getName(): "stdout"; 
-		ApplicationProcessor.setApplicationStdOut(value, appDepType, stdout);
-		
-	
-		String stderr = (appDepType.getStandardError() != null) ? new File(appDepType.getStandardError()).getName() : "stderr"; 
-		ApplicationProcessor.setApplicationStdErr(value, appDepType, stderr);
-	
 	}
 	
 	public static String getUserNameFromContext(JobExecutionContext jobContext) {
@@ -117,79 +137,7 @@ public class ApplicationProcessor {
 		//FIXME: Discuss to get user and change this
 		return "admin";
 	}
-	public static boolean isParallelJob(HpcApplicationDeploymentType appDepType) {
-		
-		boolean isParallel = false;
-		
-		if (appDepType.getJobType() != null) {
-			// TODO set data output directory
-			int status = appDepType.getJobType().intValue();
-
-			switch (status) {
-			// TODO: this check should be done outside this class
-			case JobTypeType.INT_MPI:
-			case JobTypeType.INT_OPEN_MP:
-				isParallel = true;
-				break;
-				
-			case JobTypeType.INT_SERIAL:
-			case JobTypeType.INT_SINGLE:
-				isParallel = false;
-				break;
 
-			default:
-				isParallel = false;
-				break;
-			}
-		}
-		return isParallel;
-	}
-
-	
-	public static void createApplicationEnvironment(JobDefinitionType value, NameValuePairType[] nameValuePairs, HpcApplicationDeploymentType appDepType) {
-		
-		if(isParallelJob(appDepType)) {
-			for (NameValuePairType nv : nameValuePairs) {
-				EnvironmentType envType = JSDLUtils.getOrCreateSPMDApplication(value).addNewEnvironment();
-				envType.setName(nv.getName());
-				envType.setStringValue(nv.getValue());
-			}
-		}
-		else {
-			for (NameValuePairType nv : nameValuePairs) {
-				EnvironmentType envType = JSDLUtils.getOrCreatePOSIXApplication(value).addNewEnvironment();
-				envType.setName(nv.getName());
-				envType.setStringValue(nv.getValue());
-			}
-		}
-
-	}
-	
-	
-	public static String getSPMDVariation (HpcApplicationDeploymentType appDepType) {
-		
-		String variation = null;
-		
-		if (appDepType.getJobType() != null) {
-			// TODO set data output directory
-			int status = appDepType.getJobType().intValue();
-
-			switch (status) {
-			// TODO: this check should be done outside this class
-			case JobTypeType.INT_MPI:
-				variation = SPMDVariations.MPI.value();				
-				break;
-				
-			case JobTypeType.INT_OPEN_MP:
-				variation = SPMDVariations.OpenMPI.value();
-				break;
-				
-			}
-		}
-		return variation;
-	}
-	
-	
 	public static void addApplicationArgument(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stringPrm) {
 		if(isParallelJob(appDepType)) 		
 			JSDLUtils.getOrCreateSPMDApplication(value)
@@ -200,24 +148,6 @@ public class ApplicationProcessor {
 
 	}
 	
-	public static void setApplicationStdErr(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
-		FileNameType fName = FileNameType.Factory.newInstance();
-		fName.setStringValue(stderr);
-		if (isParallelJob(appDepType)) 
-			JSDLUtils.getOrCreateSPMDApplication(value).setError(fName);
-		else 
-			JSDLUtils.getOrCreatePOSIXApplication(value).setError(fName);
-	}
-	
-	public static void setApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType, String stderr) {
-		FileNameType fName = FileNameType.Factory.newInstance();
-		fName.setStringValue(stderr);
-		if (isParallelJob(appDepType)) 
-			JSDLUtils.getOrCreateSPMDApplication(value).setOutput(fName);
-		else 
-			JSDLUtils.getOrCreatePOSIXApplication(value).setOutput(fName);
-	}
-	
 	public static String getApplicationStdOut(JobDefinitionType value, HpcApplicationDeploymentType appDepType) throws RuntimeException {
 		if (isParallelJob(appDepType)) return JSDLUtils.getOrCreateSPMDApplication(value).getOutput().getStringValue();
 		else return JSDLUtils.getOrCreatePOSIXApplication(value).getOutput().getStringValue();
@@ -228,18 +158,14 @@ public class ApplicationProcessor {
 		else return JSDLUtils.getOrCreatePOSIXApplication(value).getError().getStringValue();
 	}
 	
-	public static void createGenericApplication(JobDefinitionType value, HpcApplicationDeploymentType appDepType) {
-		if (appDepType.getApplicationName() != null) {
-			ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
-			String appName = appDepType.getApplicationName()
-					.getStringValue();
-			appType.setApplicationName(appName);
-			JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
-		}
-	}
-	
-	
-	public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
+	public static void createGenericApplication(JobDefinitionType value, String appName) {
+        ApplicationType appType = JSDLUtils.getOrCreateApplication(value);
+        appType.setApplicationName(appName);
+        JSDLUtils.getOrCreateJobIdentification(value).setJobName(appName);
+    }
+
+
+    public static String getValueFromMap(HpcApplicationDeploymentType appDepType, String name) {
 		ExtendedKeyValueType[] extended = appDepType.getKeyValuePairsArray();
 		for(ExtendedKeyValueType e: extended) {
 			if(e.getName().equalsIgnoreCase(name)) {
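
Parallel sizing is now taken from the task's computational resource scheduling rather than the descriptor's key/value pairs: processes per host is totalCPUCount / totalNodeCount (integer division), so 32 CPUs spread over 4 nodes gives 8 processes per host, and the nodeCount > 0 check doubles as a divide-by-zero guard. A condensed sketch of that derivation, using only calls that appear in the hunk above:

    // Sketch: SPMD sizing from the task scheduling data (MPI / OpenMP+MPI branch above).
    int totalCPUCount  = context.getTaskData().getTaskScheduling().getTotalCPUCount();
    int totalNodeCount = context.getTaskData().getTaskScheduling().getNodeCount();

    if (totalCPUCount > 0) {
        NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
        num.setStringValue(String.valueOf(totalCPUCount));
        JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
    }
    if (totalNodeCount > 0) {
        int ppn = totalCPUCount / totalNodeCount;   // e.g. 32 / 4 = 8 processes per host
        ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
        pph.setStringValue(String.valueOf(ppn));
        JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
    }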

http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
index ff6f2c2..b38808b 100644
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/utils/GFacUtils.java
@@ -39,7 +39,9 @@ import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.states.GfacExperimentState;
 import org.apache.airavata.gfac.core.states.GfacPluginState;
+import org.apache.airavata.model.appcatalog.computeresource.GlobusJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.LOCALSubmission;
+import org.apache.airavata.model.appcatalog.computeresource.SSHJobSubmission;
 import org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.model.workspace.experiment.DataType;
@@ -1258,21 +1260,34 @@ public class GFacUtils {
             AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
             return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
         }catch (Exception e){
-            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            String errorMsg = "Error while retrieving UNICORE job submission with submission id : " + submissionId;
             log.error(errorMsg, e);
             throw new AppCatalogException(errorMsg, e);
         }
     }
 
-    public static UnicoreJobSubmission getJobSubmission (String submissionId) throws AppCatalogException{
+    public static GlobusJobSubmission getGlobusJobSubmission (String submissionId) throws AppCatalogException{
+        return null;
+//        try {
+//            AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
+//            return appCatalog.getComputeResource().getGlobus(submissionId);
+//        }catch (Exception e){
+//            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+//            log.error(errorMsg, e);
+//            throw new AppCatalogException(errorMsg, e);
+//        }
+    }
+
+    public static SSHJobSubmission getSSHJobSubmission (String submissionId) throws AppCatalogException{
         try {
             AppCatalog appCatalog = AppCatalogFactory.getAppCatalog();
-            return appCatalog.getComputeResource().getUNICOREJobSubmission(submissionId);
+            return appCatalog.getComputeResource().getSSHJobSubmission(submissionId);
         }catch (Exception e){
-            String errorMsg = "Error while retrieving local job submission with submission id : " + submissionId;
+            String errorMsg = "Error while retrieving SSH job submission with submission id : " + submissionId;
             log.error(errorMsg, e);
             throw new AppCatalogException(errorMsg, e);
         }
     }
 
+
 }
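
All of these getters follow the same shape: fetch the compute-resource sub-object from the app catalog by submission id and wrap any failure in an AppCatalogException (getGlobusJobSubmission is still a stub returning null). A sketch of a typical caller, with hypothetical error text:

    // Hypothetical caller of the new getters, mirroring the UNICORE lookups above.
    try {
        SSHJobSubmission sshJobSubmission = GFacUtils.getSSHJobSubmission(submissionId);
        // ... configure the SSH provider/handler from sshJobSubmission ...
    } catch (AppCatalogException e) {
        throw new GFacHandlerException("Could not load SSH job submission " + submissionId, e);
    }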

http://git-wip-us.apache.org/repos/asf/airavata/blob/3f953e02/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
index 940fff3..5c5af53 100644
--- a/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
+++ b/modules/gfac/gfac-ec2/src/main/java/org/apache/airavata/gfac/ec2/EC2Provider.java
@@ -38,6 +38,7 @@ import org.apache.airavata.gfac.core.provider.utils.ProviderUtils;
 import org.apache.airavata.gfac.core.utils.GFacUtils;
 import org.apache.airavata.gfac.ec2.util.AmazonEC2Util;
 import org.apache.airavata.gfac.ec2.util.EC2ProviderUtil;
+import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.JobState;
 import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
 import org.apache.airavata.schemas.gfac.Ec2ApplicationDeploymentType;
@@ -90,7 +91,7 @@ public class EC2Provider extends AbstractProvider {
 
     public void initialize(JobExecutionContext jobExecutionContext) throws GFacProviderException,GFacException{
         if (jobExecutionContext != null) {
-    		jobId="EC2_"+jobExecutionContext.getApplicationContext().getHostDescription().getType().getHostAddress()+"_"+Calendar.getInstance().getTimeInMillis();
+    		jobId="EC2_"+jobExecutionContext.getHostName()+"_"+Calendar.getInstance().getTimeInMillis();
             if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
                     instanceof AmazonSecurityContext) {
                 this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
@@ -156,10 +157,9 @@ public class EC2Provider extends AbstractProvider {
         try
         {
             String outParamName;
-            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
-                    getServiceDescription().getType().getOutputParametersArray();
-            if(outputParametersArray != null) {
-                outParamName = outputParametersArray[0].getParameterName();
+            List<OutputDataObjectType> outputs = jobExecutionContext.getApplicationContext().getApplicationInterfaceDescription().getApplicationOutputs();
+            if(outputs != null && !outputs.isEmpty()) {
+                outParamName = outputs.get(0).getName();
             } else {
                 throw new GFacProviderException("Output parameter name is not set. Therefore, not being able " +
                         "to filter the job result from standard out ");
@@ -217,11 +217,10 @@ public class EC2Provider extends AbstractProvider {
             executionResult = executionResult.replace("\r","").replace("\n","");
             log.info("Result of the job : " + executionResult);
 
-            for(OutputParameterType outparamType : outputParametersArray){
+            for(OutputDataObjectType outparamType : outputs){
                 /* Assuming that there is just a single result. If you want to add more results, update the necessary
                    logic below */
-                String paramName = outparamType.getParameterName();
-                ActualParameter outParam = new ActualParameter();
+                String paramName = outparamType.getName();
                 outParam.getType().changeType(StringParameterType.type);
                 ((StringParameterType) outParam.getType()).setValue(executionResult);
                 jobExecutionContext.getOutMessageContext().addParameter(paramName, outParam);