Posted to commits@airavata.apache.org by sh...@apache.org on 2015/06/05 20:53:38 UTC

[1/2] airavata git commit: Rename a few interfaces to have generic, self-descriptive names

Repository: airavata
Updated Branches:
  refs/heads/master 2f3da0104 -> bad2c1c0a


http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
index f162fd5..d1b35cd 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/provider/impl/SSHProvider.java
@@ -21,6 +21,7 @@
 
 package org.apache.airavata.gfac.ssh.provider.impl;
 
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.AiravataException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
@@ -29,7 +30,6 @@ import org.apache.airavata.gfac.core.Constants;
 import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.gfac.core.cluster.JobStatus;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
@@ -73,7 +73,7 @@ import java.util.*;
  */
 public class SSHProvider extends AbstractProvider {
     private static final Logger log = LoggerFactory.getLogger(SSHProvider.class);
-    private Cluster cluster;
+    private RemoteCluster remoteCluster;
     private String jobID = null;
     private String taskID = null;
     // we keep gsisshprovider to support qsub submission incase of hpc scenario with ssh
@@ -93,20 +93,20 @@ public class SSHProvider extends AbstractProvider {
             JobSubmissionProtocol preferredJobSubmissionProtocol = jobExecutionContext.getPreferredJobSubmissionProtocol();
             if (preferredJobSubmissionProtocol == JobSubmissionProtocol.SSH && resourceJobManagerType == ResourceJobManagerType.FORK) {
                 jobID = "SSH_" + jobExecutionContext.getHostName() + "_" + Calendar.getInstance().getTimeInMillis();
-                cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
+                remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
 
                 String remoteFile = jobExecutionContext.getWorkingDir() + File.separatorChar + Constants.EXECUTABLE_NAME;
                 details.setJobID(taskID);
                 details.setJobDescription(remoteFile);
                 jobExecutionContext.setJobDetails(details);
-                // FIXME : Why cluster is passed as null
-                JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, cluster);
+                // FIXME : Why remoteCluster is passed as null
+                JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
                 details.setJobDescription(jobDescriptor.toXML());
 
                 GFacUtils.saveJobStatus(jobExecutionContext, details, JobState.SETUP);
                 log.info(remoteFile);
                 File runscript = createShellScript(jobExecutionContext);
-                cluster.scpTo(remoteFile, runscript.getAbsolutePath());
+                remoteCluster.scpTo(remoteFile, runscript.getAbsolutePath());
             } else {
                 hpcType = true;
             }
@@ -130,7 +130,7 @@ public class SSHProvider extends AbstractProvider {
                 RawCommandInfo rawCommandInfo = new RawCommandInfo("/bin/chmod 755 " + executable + "; " + executable);
                 StandardOutReader jobIDReaderCommandOutput = new StandardOutReader();
                 log.info("Executing RawCommand : " + rawCommandInfo.getCommand());
-                CommandExecutor.executeCommand(rawCommandInfo, cluster.getSession(), jobIDReaderCommandOutput);
+                CommandExecutor.executeCommand(rawCommandInfo, remoteCluster.getSession(), jobIDReaderCommandOutput);
                 String stdOutputString = getOutputifAvailable(jobIDReaderCommandOutput, "Error submitting job to resource");
                 log.info("stdout=" + stdOutputString);
             } catch (Exception e) {
@@ -143,36 +143,36 @@ public class SSHProvider extends AbstractProvider {
                 String hostAddress = jobExecutionContext.getHostName();
                 MonitorPublisher monitorPublisher = jobExecutionContext.getMonitorPublisher();
                 try {
-                    Cluster cluster = null;
+                    RemoteCluster remoteCluster = null;
                     if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
                         GFACSSHUtils.addSecurityContext(jobExecutionContext);
                     }
-                    cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-                    if (cluster == null) {
+                    remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+                    if (remoteCluster == null) {
                         throw new GFacProviderException("Security context is not set properly");
                     } else {
                         log.info("Successfully retrieved the Security Context");
                     }
                     // This installed path is a mandetory field, because this could change based on the computing resource
-                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, cluster);
+                    JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
                     jobDetails.setJobName(jobDescriptor.getJobName());
                     log.info(jobDescriptor.toXML());
                     jobDetails.setJobDescription(jobDescriptor.toXML());
-                    String jobID = cluster.submitBatchJob(jobDescriptor);
+                    String jobID = remoteCluster.submitBatchJob(jobDescriptor);
                     if (jobID != null && !jobID.isEmpty()) {
                         jobDetails.setJobID(jobID);
                         GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);
                                 monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
                                         , GfacExperimentState.JOBSUBMITTED));
                         jobExecutionContext.setJobDetails(jobDetails);
-                        if (verifyJobSubmissionByJobId(cluster, jobID)) {
+                        if (verifyJobSubmissionByJobId(remoteCluster, jobID)) {
                             monitorPublisher.publish(new GfacExperimentStateChangeRequest(new MonitorID(jobExecutionContext)
                                     , GfacExperimentState.JOBSUBMITTED));
                             GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.QUEUED);
                         }
                     } else {
                         jobExecutionContext.setJobDetails(jobDetails);
-                        String verifyJobId = verifyJobSubmission(cluster, jobDetails);
+                        String verifyJobId = verifyJobSubmission(remoteCluster, jobDetails);
                         if (verifyJobId != null && !verifyJobId.isEmpty()) {
                             // JobStatus either changed from SUBMITTED to QUEUED or directly to QUEUED
                             jobID = verifyJobId;
@@ -216,16 +216,16 @@ public class SSHProvider extends AbstractProvider {
         }
     }
 
-    private boolean verifyJobSubmissionByJobId(Cluster cluster, String jobID) throws SSHApiException {
-        JobStatus status = cluster.getJobStatus(jobID);
+    private boolean verifyJobSubmissionByJobId(RemoteCluster remoteCluster, String jobID) throws SSHApiException {
+        JobStatus status = remoteCluster.getJobStatus(jobID);
         return status != null &&  status != JobStatus.U;
     }
 
-    private String verifyJobSubmission(Cluster cluster, JobDetails jobDetails) {
+    private String verifyJobSubmission(RemoteCluster remoteCluster, JobDetails jobDetails) {
         String jobName = jobDetails.getJobName();
         String jobId = null;
         try {
-          jobId  = cluster.getJobIdByJobName(jobName, cluster.getServerInfo().getUserName());
+          jobId  = remoteCluster.getJobIdByJobName(jobName, remoteCluster.getServerInfo().getUserName());
         } catch (SSHApiException e) {
             log.error("Error while verifying JobId from JobName");
         }
@@ -243,8 +243,8 @@ public class SSHProvider extends AbstractProvider {
         if (!hpcType) {
             throw new NotImplementedException();
         } else {
-            Cluster cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+            RemoteCluster remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
@@ -256,7 +256,7 @@ public class SSHProvider extends AbstractProvider {
             }
             try {
                 if (jobDetails.getJobID() != null) {
-                    if (cluster.cancelJob(jobDetails.getJobID()) != null) {
+                    if (remoteCluster.cancelJob(jobDetails.getJobID()) != null) {
                         // if this operation success without any exceptions, we can assume cancel operation succeeded.
                         GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.CANCELED);
                         return true;
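
To make the effect of this rename concrete, here is a minimal, illustrative sketch (not part of the commit) of how a provider now obtains the renamed RemoteCluster from the SSHSecurityContext and drives a submission, mirroring the calls shown in the SSHProvider diff above. It assumes the gfac-core and gfac-impl modules are on the classpath and a fully populated JobExecutionContext; exception handling is collapsed into a plain throws clause, and the class name is made up for illustration.

import org.apache.airavata.gfac.core.JobDescriptor;
import org.apache.airavata.gfac.core.cluster.JobStatus;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.context.JobExecutionContext;
import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
import org.apache.airavata.gfac.ssh.util.GFACSSHUtils;

public class RemoteClusterSubmissionSketch {
    // Illustrative only: mirrors the renamed calls in SSHProvider above.
    public String submit(JobExecutionContext jobExecutionContext) throws Exception {
        String hostAddress = jobExecutionContext.getHostName();
        if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
            GFACSSHUtils.addSecurityContext(jobExecutionContext); // wires a RemoteCluster into the context
        }
        RemoteCluster remoteCluster =
                ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
        JobDescriptor jobDescriptor = GFACSSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
        String jobID = remoteCluster.submitBatchJob(jobDescriptor);   // returns the resource job manager's job id
        JobStatus status = remoteCluster.getJobStatus(jobID);         // JobStatus.U is treated as "unknown"
        return (status != null && status != JobStatus.U) ? jobID : null;
    }
}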

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/security/SSHSecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/security/SSHSecurityContext.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/security/SSHSecurityContext.java
index a624efc..d6464e2 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/security/SSHSecurityContext.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/security/SSHSecurityContext.java
@@ -27,7 +27,7 @@ import net.schmizz.sshj.connection.channel.direct.Session;
 import net.schmizz.sshj.userauth.keyprovider.KeyProvider;
 
 import org.apache.airavata.gfac.core.SecurityContext;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,7 +43,7 @@ public class SSHSecurityContext implements SecurityContext {
 	private SSHClient sshClient;
 	private Session session;
 
-    private Cluster pbsCluster;
+    private RemoteCluster remoteCluster;
 
 	public String getUsername() {
 		return username;
@@ -108,11 +108,11 @@ public class SSHSecurityContext implements SecurityContext {
 		return sshClient;
 	}
 
-    public void setPbsCluster(Cluster pbsCluster) {
-        this.pbsCluster = pbsCluster;
+    public void setRemoteCluster(RemoteCluster remoteCluster) {
+        this.remoteCluster = remoteCluster;
     }
 
-    public Cluster getPbsCluster() {
-        return this.pbsCluster;
+    public RemoteCluster getRemoteCluster() {
+        return this.remoteCluster;
     }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
index 2fba9a9..69c7df4 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/GFACSSHUtils.java
@@ -20,6 +20,8 @@
 */
 package org.apache.airavata.gfac.ssh.util;
 
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.registry.cpi.AppCatalogException;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
@@ -30,14 +32,12 @@ import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.RequestData;
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.gsi.ssh.impl.GSISSHAbstractCluster;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
 import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
 import org.apache.airavata.gfac.ssh.context.SSHAuthWrapper;
@@ -66,7 +66,7 @@ import java.util.*;
 public class GFACSSHUtils {
     private final static Logger logger = LoggerFactory.getLogger(GFACSSHUtils.class);
 
-    public static Map<String, List<Cluster>> clusters = new HashMap<String, List<Cluster>>();
+    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
 
     public static final String PBS_JOB_MANAGER = "pbs";
     public static final String SLURM_JOB_MANAGER = "slurm";
@@ -99,7 +99,7 @@ public class GFACSSHUtils {
 
                     ServerInfo serverInfo = new ServerInfo(null, jobExecutionContext.getHostName());
 
-                    Cluster pbsCluster = null;
+                    RemoteCluster pbsRemoteCluster = null;
                     try {
                         AuthenticationInfo tokenizedSSHAuthInfo = new TokenizedSSHAuthInfo(requestData);
                         String installedParentPath = jobExecutionContext.getResourceJobManager().getJobManagerBinPath();
@@ -132,19 +132,19 @@ public class GFACSSHUtils {
                             } else if (clusters.containsKey(key)) {
                                 int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
                                 if (clusters.get(key).get(i).getSession().isConnected()) {
-                                    pbsCluster = clusters.get(key).get(i);
+                                    pbsRemoteCluster = clusters.get(key).get(i);
                                 } else {
                                     clusters.get(key).remove(i);
                                     recreate = true;
                                 }
                                 if (!recreate) {
                                     try {
-                                        pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
+                                        pbsRemoteCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
                                     } catch (Exception e) {
                                         clusters.get(key).remove(i);
                                         logger.info("Connection found the connection map is expired, so we create from the scratch");
                                         maxClusterCount++;
-                                        recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+                                        recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
                                     }
                                 }
                                 logger.info("Re-using the same connection used with the connection string:" + key);
@@ -169,21 +169,21 @@ public class GFACSSHUtils {
                                      }
                                  }
 
-                                pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo,jConfig);
-                                List<Cluster> pbsClusters = null;
+                                pbsRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedSSHAuthInfo,jConfig);
+                                List<RemoteCluster> pbsRemoteClusters = null;
                                 if (!(clusters.containsKey(key))) {
-                                    pbsClusters = new ArrayList<Cluster>();
+                                    pbsRemoteClusters = new ArrayList<RemoteCluster>();
                                 } else {
-                                    pbsClusters = clusters.get(key);
+                                    pbsRemoteClusters = clusters.get(key);
                                 }
-                                pbsClusters.add(pbsCluster);
-                                clusters.put(key, pbsClusters);
+                                pbsRemoteClusters.add(pbsRemoteCluster);
+                                clusters.put(key, pbsRemoteClusters);
                             }
                         }
                     } catch (Exception e) {
                         throw new GFacException("Error occurred...", e);
                     }
-                    sshSecurityContext.setPbsCluster(pbsCluster);
+                    sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
                     jobExecutionContext.addSecurityContext(jobExecutionContext.getHostName(), sshSecurityContext);
                 }
             } catch (AppCatalogException e) {
@@ -214,7 +214,7 @@ public class GFACSSHUtils {
 				 logger.error("Not able to get SSHJobSubmission from registry");
 			}
 
-            Cluster pbsCluster = null;
+            RemoteCluster pbsRemoteCluster = null;
             String key=sshAuth.getKey();
             boolean recreate = false;
             synchronized (clusters) {
@@ -223,19 +223,19 @@ public class GFACSSHUtils {
                 } else if (clusters.containsKey(key)) {
                     int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
                     if (clusters.get(key).get(i).getSession().isConnected()) {
-                        pbsCluster = clusters.get(key).get(i);
+                        pbsRemoteCluster = clusters.get(key).get(i);
                     } else {
                         clusters.get(key).remove(i);
                         recreate = true;
                     }
                     if (!recreate) {
                         try {
-                            pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
+                            pbsRemoteCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
                         } catch (Exception e) {
                             clusters.get(key).remove(i);
                             logger.info("Connection found the connection map is expired, so we create from the scratch");
                             maxClusterCount++;
-                            recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+                            recreate = true; // we make the pbsRemoteCluster to create again if there is any exception druing connection
                         }
                     }
                     logger.info("Re-using the same connection used with the connection string:" + key);
@@ -268,19 +268,19 @@ public class GFACSSHUtils {
 							}
 						}
 					}
-                    pbsCluster = new PBSCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),jConfig);
+                    pbsRemoteCluster = new HPCRemoteCluster(sshAuth.getServerInfo(), sshAuth.getAuthenticationInfo(),jConfig);
                     key = sshAuth.getKey();
-                    List<Cluster> pbsClusters = null;
+                    List<RemoteCluster> pbsRemoteClusters = null;
                     if (!(clusters.containsKey(key))) {
-                        pbsClusters = new ArrayList<Cluster>();
+                        pbsRemoteClusters = new ArrayList<RemoteCluster>();
                     } else {
-                        pbsClusters = clusters.get(key);
+                        pbsRemoteClusters = clusters.get(key);
                     }
-                    pbsClusters.add(pbsCluster);
-                    clusters.put(key, pbsClusters);
+                    pbsRemoteClusters.add(pbsRemoteCluster);
+                    clusters.put(key, pbsRemoteClusters);
                 }
             }
-            sshSecurityContext.setPbsCluster(pbsCluster);
+            sshSecurityContext.setRemoteCluster(pbsRemoteCluster);
             jobExecutionContext.addSecurityContext(key, sshSecurityContext);
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
@@ -289,7 +289,7 @@ public class GFACSSHUtils {
     }
 
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) throws AppCatalogException, ApplicationSettingsException {
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws AppCatalogException, ApplicationSettingsException {
         JobDescriptor jobDescriptor = new JobDescriptor();
         TaskDetails taskData = jobExecutionContext.getTaskData();
 
@@ -415,10 +415,10 @@ public class GFACSSHUtils {
         }
 
         jobDescriptor.setInputValues(inputValues);
-        jobDescriptor.setUserName(((GSISSHAbstractCluster) cluster).getServerInfo().getUserName());
+        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
         jobDescriptor.setShellName("/bin/bash");
         jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
+        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
 
         ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
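
The reuse-or-recreate logic above appears twice in GFACSSHUtils; a condensed, hedged sketch of the same pattern is shown below. The connection key, ServerInfo, AuthenticationInfo and JobManagerConfiguration are assumed to be prepared exactly as in the diff, and the class name is made up for illustration.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.airavata.gfac.core.JobManagerConfiguration;
import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.cluster.ServerInfo;
import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;

public class ClusterConnectionCacheSketch {
    private static final Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();

    // Returns a live RemoteCluster for the given connection key, reusing a cached one when possible.
    public static RemoteCluster acquire(String key, ServerInfo serverInfo, AuthenticationInfo authInfo,
                                        JobManagerConfiguration jConfig) throws Exception {
        synchronized (clusters) {
            List<RemoteCluster> cached = clusters.get(key);
            if (cached != null && !cached.isEmpty()) {
                RemoteCluster candidate = cached.get(0);
                try {
                    candidate.listDirectory("~/"); // isConnected() is not trusted, so probe the session
                    return candidate;
                } catch (Exception e) {
                    cached.remove(0);              // stale connection; fall through and recreate
                }
            }
            RemoteCluster fresh = new HPCRemoteCluster(serverInfo, authInfo, jConfig);
            if (cached == null) {
                cached = new ArrayList<RemoteCluster>();
                clusters.put(key, cached);
            }
            cached.add(fresh);
            return fresh;
        }
    }
}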
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
index 31550fd..890b858 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/util/HandleOutputs.java
@@ -8,7 +8,7 @@ import java.util.List;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.model.appcatalog.appinterface.DataType;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.slf4j.Logger;
@@ -21,14 +21,14 @@ import org.slf4j.LoggerFactory;
 public class HandleOutputs {
 	private static final Logger log = LoggerFactory.getLogger(HandleOutputs.class);
 
-	public static List<OutputDataObjectType> handleOutputs(JobExecutionContext jobExecutionContext, Cluster cluster) throws GFacHandlerException {
+	public static List<OutputDataObjectType> handleOutputs(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) throws GFacHandlerException {
 		List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
 		try {
 			String outputDataDir = File.separator + "tmp" + File.separator + jobExecutionContext.getExperimentID();
 			(new File(outputDataDir)).mkdirs();
 
 			List<OutputDataObjectType> outputs = jobExecutionContext.getTaskData().getApplicationOutputs();
-			List<String> outputList = cluster.listDirectory(jobExecutionContext.getWorkingDir());
+			List<String> outputList = remoteCluster.listDirectory(jobExecutionContext.getWorkingDir());
 			boolean missingOutput = false;
 
 			for (OutputDataObjectType output : outputs) {
@@ -45,7 +45,7 @@ public class HandleOutputs {
 					if (output.getLocation() == null && !outputList.contains(fileName) && output.isIsRequired()) {
 						missingOutput = true;
 					} else {
-						cluster.scpFrom(outputFile, outputDataDir);
+						remoteCluster.scpFrom(outputFile, outputDataDir);
 						String localFile = outputDataDir + File.separator + fileName;
 						jobExecutionContext.addOutputFile(localFile);
 						output.setValue(localFile);
@@ -55,7 +55,7 @@ public class HandleOutputs {
 				} else if (DataType.STDOUT == output.getType()) {
 					String downloadFile = jobExecutionContext.getStandardOutput();
 					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-					cluster.scpFrom(downloadFile, outputDataDir);
+					remoteCluster.scpFrom(downloadFile, outputDataDir);
 					String localFile = outputDataDir + File.separator + fileName;
 					jobExecutionContext.addOutputFile(localFile);
 					jobExecutionContext.setStandardOutput(localFile);
@@ -65,7 +65,7 @@ public class HandleOutputs {
 				} else if (DataType.STDERR == output.getType()) {
 					String downloadFile = jobExecutionContext.getStandardError();
 					String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
-					cluster.scpFrom(downloadFile, outputDataDir);
+					remoteCluster.scpFrom(downloadFile, outputDataDir);
 					String localFile = outputDataDir + File.separator + fileName;
 					jobExecutionContext.addOutputFile(localFile);
 					jobExecutionContext.setStandardError(localFile);
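
A short, hedged sketch of calling the renamed utility follows; it assumes a populated JobExecutionContext, a connected RemoteCluster, and that OutputDataObjectType exposes the usual Thrift-generated getters alongside the setValue() call used above. The class name is made up for illustration.

import java.util.List;

import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.context.JobExecutionContext;
import org.apache.airavata.gfac.core.handler.GFacHandlerException;
import org.apache.airavata.gfac.ssh.util.HandleOutputs;
import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;

public class OutputStagingSketch {
    public static void stage(JobExecutionContext ctx, RemoteCluster remoteCluster) throws GFacHandlerException {
        // Pulls declared outputs plus stdout/stderr from the remote working dir into /tmp/<experimentId>
        List<OutputDataObjectType> outputs = HandleOutputs.handleOutputs(ctx, remoteCluster);
        for (OutputDataObjectType output : outputs) {
            System.out.println(output.getType() + " staged to " + output.getValue()); // local path after scpFrom
        }
    }
}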

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
index c83bf6a..65d52c3 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/BigRed2TestWithSSHAuth.java
@@ -32,12 +32,12 @@
 //import org.apache.airavata.gfac.core.context.MessageContext;
 //import org.apache.airavata.gfac.impl.BetterGfacImpl;
 //import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
-//import org.apache.airavata.gfac.ssh.api.Cluster;
+//import org.apache.airavata.gfac.ssh.api.RemoteCluster;
 //import org.apache.airavata.gfac.ssh.api.SSHApiException;
 //import org.apache.airavata.gfac.ssh.api.ServerInfo;
 //import AuthenticationInfo;
 //import org.apache.airavata.gfac.ssh.api.job.JobManagerConfiguration;
-//import org.apache.airavata.gfac.ssh.impl.PBSCluster;
+//import org.apache.airavata.gfac.ssh.impl.HPCRemoteCluster;
 //import org.apache.airavata.gfac.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
 //import org.apache.airavata.gfac.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
 //import org.apache.airavata.gfac.ssh.util.CommonUtils;
@@ -222,15 +222,15 @@
 //        // Server info
 //        ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
 //
-//        Cluster pbsCluster = null;
+//        RemoteCluster pbsCluster = null;
 //        SSHSecurityContext sshSecurityContext = null;
 //
 //            JobManagerConfiguration pbsJobManager = CommonUtils.getPBSJobManager(app.getInstalledParentPath());
-//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, pbsJobManager);
+//            pbsCluster = new HPCRemoteCluster(serverInfo, authenticationInfo, pbsJobManager);
 //
 //
 //            sshSecurityContext = new SSHSecurityContext();
-//            sshSecurityContext.setPbsCluster(pbsCluster);
+//            sshSecurityContext.setRemoteCluster(pbsCluster);
 //            sshSecurityContext.setUsername(userName);
 //            sshSecurityContext.setKeyPass(passPhrase);
 //            sshSecurityContext.setPrivateKeyLoc(privateKeyPath);

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/CredentialStoreTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/CredentialStoreTest.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/CredentialStoreTest.java
index babbaf7..8158fc5 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/CredentialStoreTest.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/CredentialStoreTest.java
@@ -37,7 +37,7 @@
 //import org.apache.airavata.gfac.ssh.security.TokenizedSSHAuthInfo;
 //import org.apache.airavata.gfac.ssh.api.SSHApiException;
 //import org.apache.airavata.gfac.ssh.api.ServerInfo;
-//import org.apache.airavata.gfac.ssh.impl.PBSCluster;
+//import org.apache.airavata.gfac.ssh.impl.HPCRemoteCluster;
 //import org.apache.airavata.gfac.ssh.util.CommonUtils;
 //import org.apache.airavata.registry.api.AiravataRegistry2;
 //import org.apache.airavata.registry.api.AiravataRegistryFactory;
@@ -91,7 +91,7 @@
 //            SSHCredential credentials = tokenizedSSHAuthInfo.getCredentials();
 //            ServerInfo serverInfo = new ServerInfo("cgateway", "bigred2.uits.iu.edu");
 //
-//            PBSCluster pbsCluster = new PBSCluster(serverInfo, tokenizedSSHAuthInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
+//            HPCRemoteCluster pbsCluster = new HPCRemoteCluster(serverInfo, tokenizedSSHAuthInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
 //            Assert.assertNotNull(pbsCluster);
 //            return;
 //

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
index c1bc393..9891485 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/core/gfac/services/impl/GSISSHProviderTestWithMyProxyAuth.java
@@ -44,7 +44,7 @@
 //import org.apache.airavata.gfac.ssh.api.SSHApiException;
 //import org.apache.airavata.gfac.ssh.api.ServerInfo;
 //import GSIAuthenticationInfo;
-//import org.apache.airavata.gfac.ssh.impl.PBSCluster;
+//import org.apache.airavata.gfac.ssh.impl.HPCRemoteCluster;
 //import org.apache.airavata.gfac.ssh.impl.authentication.MyProxyAuthenticationInfo;
 //import org.apache.airavata.gfac.ssh.util.CommonUtils;
 //import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
@@ -211,7 +211,7 @@
 //        ServerInfo serverInfo = new ServerInfo("ogce", "trestles.sdsc.edu");
 //        Cluster pbsCluster = null;
 //        try {
-//            pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
+//            pbsCluster = new HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(app.getInstalledParentPath()));
 //        } catch (SSHApiException e) {
 //            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
 //        }

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
index 45d8ca2..4932fcf 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/DefaultSSHApiTestWithMyProxyAuth.java
@@ -28,7 +28,7 @@ import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
 import org.apache.airavata.gfac.gsi.ssh.api.CommandExecutor;
 import org.apache.airavata.gfac.gsi.ssh.config.ConfigReader;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.SystemCommandOutput;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyAuthentication;
 import org.apache.commons.io.IOUtils;
@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
 import java.io.*;
 
 public class DefaultSSHApiTestWithMyProxyAuth {
-    private static final Logger log = LoggerFactory.getLogger(PBSCluster.class);
+    private static final Logger log = LoggerFactory.getLogger(HPCRemoteCluster.class);
 
 
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
index 8bc8c66..f262930 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/gfac/ssh/impl/VanilaTestWithSSHAuth.java
@@ -23,9 +23,9 @@ package org.apache.airavata.gfac.ssh.impl;
 
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPasswordAuthenticationInfo;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.DefaultPublicKeyFileAuthentication;
 import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
@@ -112,7 +112,7 @@ public class VanilaTestWithSSHAuth {
         }
         // Server info
         ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
-        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));
+        RemoteCluster pbsRemoteCluster = new HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));
 
         String date = new Date().toString();
         date = date.replaceAll(" ", "_");
@@ -122,16 +122,16 @@ public class VanilaTestWithSSHAuth {
 
         workingDirectory = workingDirectory + File.separator
                 + date + "_" + UUID.randomUUID();
-        pbsCluster.makeDirectory(workingDirectory);
+        pbsRemoteCluster.makeDirectory(workingDirectory);
         Thread.sleep(1000);
-        pbsCluster.makeDirectory(workingDirectory + File.separator + "inputs");
+        pbsRemoteCluster.makeDirectory(workingDirectory + File.separator + "inputs");
         Thread.sleep(1000);
-        pbsCluster.makeDirectory(workingDirectory + File.separator + "outputs");
+        pbsRemoteCluster.makeDirectory(workingDirectory + File.separator + "outputs");
 
 
         // doing file transfer to the remote resource
         String remoteLocation = workingDirectory + File.separator + "inputs";
-        pbsCluster.scpTo(remoteLocation, pomFile);
+        pbsRemoteCluster.scpTo(remoteLocation, pomFile);
 
         int i = pomFile.lastIndexOf(File.separator);
         String fileName = pomFile.substring(i + 1);
@@ -156,12 +156,12 @@ public class VanilaTestWithSSHAuth {
         //finished construction of job object
         System.out.println(jobDescriptor.toXML());
         if(hostName.contains("trestles")){
-        String jobID = pbsCluster.submitBatchJob(jobDescriptor);
+        String jobID = pbsRemoteCluster.submitBatchJob(jobDescriptor);
         System.out.println("JobID returned : " + jobID);
 
-//        Cluster cluster = sshApi.getCluster(serverInfo, authenticationInfo);
+//        RemoteCluster cluster = sshApi.getCluster(serverInfo, authenticationInfo);
         Thread.sleep(1000);
-        JobDescriptor jobById = pbsCluster.getJobDescriptorById(jobID);
+        JobDescriptor jobById = pbsRemoteCluster.getJobDescriptorById(jobID);
 
         //printing job data got from previous call
         AssertJUnit.assertEquals(jobById.getJobId(), jobID);
@@ -224,8 +224,8 @@ public class VanilaTestWithSSHAuth {
         jobDescriptor.setInputValues(inputs);
         //finished construction of job object
         System.out.println(jobDescriptor.toXML());
-        Cluster pbsCluster = new PBSCluster(CommonUtils.getLSFJobManager(""));
-        ((PBSCluster) pbsCluster).generateJobScript(jobDescriptor);
+        RemoteCluster pbsRemoteCluster = new HPCRemoteCluster(CommonUtils.getLSFJobManager(""));
+        ((HPCRemoteCluster) pbsRemoteCluster).generateJobScript(jobDescriptor);
     }
 
     @Test
@@ -240,8 +240,8 @@ public class VanilaTestWithSSHAuth {
         }
         // Server info
         ServerInfo serverInfo = new ServerInfo(this.userName, this.hostName);
-        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));
-        new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));;
+        RemoteCluster pbsRemoteCluster = new HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));
+        new HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager(path));;
 
         String date = new Date().toString();
         date = date.replaceAll(" ", "_");
@@ -256,9 +256,9 @@ public class VanilaTestWithSSHAuth {
         // resource
         workingDirectory = workingDirectory + File.separator
                 + date + "_" + UUID.randomUUID();
-        pbsCluster.makeDirectory(workingDirectory);
-        pbsCluster.scpTo(workingDirectory, pomFile);
+        pbsRemoteCluster.makeDirectory(workingDirectory);
+        pbsRemoteCluster.scpTo(workingDirectory, pomFile);
         Thread.sleep(1000);
-        pbsCluster.scpFrom(workingDirectory + File.separator + "pom.xml", (new File(".")).getAbsolutePath());
+        pbsRemoteCluster.scpFrom(workingDirectory + File.separator + "pom.xml", (new File(".")).getAbsolutePath());
     }
 }

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
index fc0ea30..f4da3d0 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/AMQPMonitorTest.java
@@ -22,16 +22,16 @@ package org.apache.airavata.job;
 
 import com.google.common.eventbus.EventBus;
 import com.google.common.eventbus.Subscribe;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.aiaravata.application.catalog.data.impl.AppCatalogFactory;
 import org.apache.airavata.common.utils.MonitorPublisher;
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.SSHApiException;
 import org.apache.airavata.gfac.core.authentication.GSIAuthenticationInfo;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gfac.gsi.ssh.impl.authentication.MyProxyAuthenticationInfo;
 import org.apache.airavata.gfac.gsi.ssh.util.CommonUtils;
 import org.apache.airavata.gfac.monitor.impl.push.amqp.AMQPMonitor;
@@ -148,8 +148,8 @@ public class AMQPMonitorTest {
         ServerInfo serverInfo = new ServerInfo("ogce", "login1.stampede.tacc.utexas.edu",2222);
 
 
-        Cluster pbsCluster = new
-                PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/usr/bin/"));
+        RemoteCluster pbsRemoteCluster = new
+                HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/usr/bin/"));
 
 
         // Execute command
@@ -176,7 +176,7 @@ public class AMQPMonitorTest {
         jobDescriptor.setInputValues(inputs);
         //finished construction of job object
         System.out.println(jobDescriptor.toXML());
-        String jobID = pbsCluster.submitBatchJob(jobDescriptor);
+        String jobID = pbsRemoteCluster.submitBatchJob(jobDescriptor);
         System.out.println(jobID);
         try {
             pushQueue.add(new MonitorID(computeResourceDescription, jobID,null,null,null, "ogce", jobName));

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
index c405e8c..70727f7 100644
--- a/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
+++ b/modules/gfac/gfac-impl/src/test/java/org/apache/airavata/job/QstatMonitorTestWithMyProxyAuth.java
@@ -37,7 +37,7 @@
 //import org.apache.airavata.gfac.ssh.api.ServerInfo;
 //import org.apache.airavata.gfac.ssh.api.authentication.GSIAuthenticationInfo;
 //import org.apache.airavata.gfac.ssh.api.job.JobDescriptor;
-//import org.apache.airavata.gfac.ssh.impl.PBSCluster;
+//import org.apache.airavata.gfac.ssh.impl.HPCRemoteCluster;
 //import org.apache.airavata.gfac.ssh.impl.authentication.MyProxyAuthenticationInfo;
 //import org.apache.airavata.gfac.ssh.util.CommonUtils;
 //import org.apache.airavata.model.messaging.event.JobStatusChangeEvent;
@@ -119,7 +119,7 @@
 //        ServerInfo serverInfo = new ServerInfo("ogce", hostDescription.getType().getHostAddress());
 //
 //
-//        Cluster pbsCluster = new PBSCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
+//        Cluster pbsCluster = new HPCRemoteCluster(serverInfo, authenticationInfo, CommonUtils.getPBSJobManager("/opt/torque/bin/"));
 //
 //
 //        // Execute command


[2/2] airavata git commit: Rename a few interfaces to have generic, self-descriptive names

Posted by sh...@apache.org.
Rename a few interfaces to have generic, self-descriptive names


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/bad2c1c0
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/bad2c1c0
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/bad2c1c0

Branch: refs/heads/master
Commit: bad2c1c0a60fa605defaa4c9e45665d2f91a722a
Parents: 2f3da01
Author: Shameera Rathanyaka <sh...@gmail.com>
Authored: Fri Jun 5 14:53:35 2015 -0400
Committer: Shameera Rathanyaka <sh...@gmail.com>
Committed: Fri Jun 5 14:53:35 2015 -0400

----------------------------------------------------------------------
 .../airavata/gfac/core/cluster/Cluster.java     | 161 -------------------
 .../gfac/core/cluster/RemoteCluster.java        | 161 +++++++++++++++++++
 .../gsi/ssh/impl/GSISSHAbstractCluster.java     |   4 +-
 .../gfac/gsi/ssh/impl/HPCRemoteCluster.java     |  46 ++++++
 .../airavata/gfac/gsi/ssh/impl/PBSCluster.java  |  46 ------
 .../handler/GSISSHDirectorySetupHandler.java    |  14 +-
 .../gfac/gsissh/handler/GSISSHInputHandler.java |  16 +-
 .../gsissh/handler/GSISSHOutputHandler.java     |  18 +--
 .../gsissh/handler/NewGSISSHOutputHandler.java  |  10 +-
 .../gsissh/provider/impl/GSISSHProvider.java    |  20 +--
 .../gsissh/security/GSISecurityContext.java     |  20 +--
 .../gfac/gsissh/util/GFACGSISSHUtils.java       |  36 ++---
 .../airavata/gfac/monitor/HPCMonitorID.java     |   4 +-
 .../impl/pull/qstat/ResourceConnection.java     |  16 +-
 .../ssh/handler/AdvancedSCPInputHandler.java    |  16 +-
 .../ssh/handler/AdvancedSCPOutputHandler.java   |  18 +--
 .../gfac/ssh/handler/NewSSHOutputHandler.java   |  10 +-
 .../ssh/handler/SSHDirectorySetupHandler.java   |  14 +-
 .../gfac/ssh/handler/SSHInputHandler.java       |  18 +--
 .../gfac/ssh/handler/SSHOutputHandler.java      |  20 +--
 .../gfac/ssh/provider/impl/SSHProvider.java     |  42 ++---
 .../gfac/ssh/security/SSHSecurityContext.java   |  12 +-
 .../airavata/gfac/ssh/util/GFACSSHUtils.java    |  56 +++----
 .../airavata/gfac/ssh/util/HandleOutputs.java   |  12 +-
 .../services/impl/BigRed2TestWithSSHAuth.java   |  10 +-
 .../gfac/services/impl/CredentialStoreTest.java |   4 +-
 .../impl/GSISSHProviderTestWithMyProxyAuth.java |   4 +-
 .../impl/DefaultSSHApiTestWithMyProxyAuth.java  |   4 +-
 .../gfac/ssh/impl/VanilaTestWithSSHAuth.java    |  34 ++--
 .../apache/airavata/job/AMQPMonitorTest.java    |  10 +-
 .../job/QstatMonitorTestWithMyProxyAuth.java    |   4 +-
 31 files changed, 430 insertions(+), 430 deletions(-)
----------------------------------------------------------------------
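
At a glance, the commit replaces org.apache.airavata.gfac.core.cluster.Cluster with RemoteCluster and the concrete PBSCluster implementation with HPCRemoteCluster, leaving method signatures and constructor shapes untouched. A minimal, illustrative snippet of caller code after this commit (types taken from the diffs in this commit; the helper class itself is made up) would look like:

import org.apache.airavata.gfac.core.JobManagerConfiguration;
import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;    // formerly ...core.cluster.Cluster
import org.apache.airavata.gfac.core.cluster.ServerInfo;
import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;  // formerly ...gsi.ssh.impl.PBSCluster

public class RenameAtAGlance {
    public static RemoteCluster connect(ServerInfo serverInfo, AuthenticationInfo authInfo,
                                        JobManagerConfiguration jobManagerConfig) throws Exception {
        // Only the names change: the SSH/PBS implementation keeps its (serverInfo, authInfo, config) constructor.
        return new HPCRemoteCluster(serverInfo, authInfo, jobManagerConfig);
    }
}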


http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/Cluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/Cluster.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/Cluster.java
deleted file mode 100644
index b116bdc..0000000
--- a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/Cluster.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.core.cluster;
-
-import com.jcraft.jsch.Session;
-import org.apache.airavata.gfac.core.JobDescriptor;
-import org.apache.airavata.gfac.core.SSHApiException;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * This interface represents a Cluster machine
- * End users of the API can implement this and come up with their own
- * implementations, but mostly this interface is for internal usage.
- */
-public interface Cluster {
-
-    /**
-     * This will submit a job to the cluster with a given pbs file and some parameters
-     *
-     * @param pbsFilePath  path of the pbs file
-     * @param workingDirectory working directory where pbs should has to copy
-     * @return jobId after successful job submission
-     * @throws SSHApiException throws exception during error
-     */
-    public String submitBatchJobWithScript(String pbsFilePath, String workingDirectory) throws SSHApiException;
-
-    /**
-     * This will submit the given job and not performing any monitoring
-     *
-     * @param jobDescriptor  job descriptor to submit to cluster, this contains all the parameter
-     * @return jobID after successful job submission.
-     * @throws SSHApiException  throws exception during error
-     */
-    public String submitBatchJob(JobDescriptor jobDescriptor) throws SSHApiException;
-
-    /**
-     * This will copy the localFile to remoteFile location in configured cluster
-     *
-     * @param remoteFile remote file location, this can be a directory too
-     * @param localFile local file path of the file which needs to copy to remote location
-     * @throws SSHApiException throws exception during error
-     */
-    public void scpTo(String remoteFile, String localFile) throws SSHApiException;
-
-    /**
-     * This will copy a remote file in path rFile to local file lFile
-     * @param remoteFile remote file path, this has to be a full qualified path
-     * @param localFile This is the local file to copy, this can be a directory too
-     * @throws SSHApiException
-     */
-    public void scpFrom(String remoteFile, String localFile) throws SSHApiException;
-
-    /**
-     * This will copy a remote file in path rFile to local file lFile
-     * @param remoteFile remote file path, this has to be a full qualified path
-     * @param localFile This is the local file to copy, this can be a directory too
-     * @throws SSHApiException
-     */
-    public void scpThirdParty(String remoteFileSorce, String remoteFileTarget) throws SSHApiException;
-    
-    /**
-     * This will create directories in computing resources
-     * @param directoryPath the full qualified path for the directory user wants to create
-     * @throws SSHApiException throws during error
-     */
-    public void makeDirectory(String directoryPath) throws SSHApiException;
-
-
-    /**
-     * This will get the job description of a job which is there in the cluster
-     * if jbo is not available with the given ID it returns
-     * @param jobID jobId has to pass
-     * @return Returns full job description of the job which submitted successfully
-     * @throws SSHApiException throws exception during error
-     */
-    public JobDescriptor getJobDescriptorById(String jobID) throws SSHApiException;
-
-    /**
-     * This will delete the given job from the queue
-     *
-     * @param jobID  jobId of the job which user wants to delete
-     * @return return the description of the deleted job
-     * @throws SSHApiException throws exception during error
-     */
-    public JobDescriptor cancelJob(String jobID) throws SSHApiException;
-
-    /**
-     * This will get the job status of the the job associated with this jobId
-     *
-     * @param jobID jobId of the job user want to get the status
-     * @return job status of the given jobID
-     * @throws SSHApiException throws exception during error
-     */
-    public JobStatus getJobStatus(String jobID) throws SSHApiException;
-    /**
-     * This will get the job status of the the job associated with this jobId
-     *
-     * @param jobName jobName of the job user want to get the status
-     * @return jobId of the given jobName
-     * @throws SSHApiException throws exception during error
-     */
-    public String getJobIdByJobName(String jobName, String userName) throws SSHApiException;
-
-    /**
-     * This method can be used to poll the jobstatuses based on the given
-     * user but we should pass the jobID list otherwise we will get unwanted
-     * job statuses which submitted by different middleware outside apache
-     * airavata with the same uername which we are not considering
-     * @param userName userName of the jobs which required to get the status
-     * @param jobIDs precises set of jobIDs
-     * @return
-     */
-    public void getJobStatuses(String userName,Map<String,JobStatus> jobIDs)throws SSHApiException;
-    /**
-     * This will list directories in computing resources
-     * @param directoryPath the full qualified path for the directory user wants to create
-     * @throws SSHApiException throws during error
-     */
-    public List<String> listDirectory(String directoryPath) throws SSHApiException;
-
-    /**
-     * This method can be used to get created ssh session
-     * to reuse the created session.
-     * @throws SSHApiException
-     */
-    public Session getSession() throws SSHApiException;
-    
-    /**
-     * This method can be used to close the connections initialized
-     * to handle graceful shutdown of the system
-     * @throws SSHApiException
-     */
-    public void disconnect() throws SSHApiException;
-
-    /**
-     * This gives the server Info
-     * @return
-     */
-    public ServerInfo getServerInfo();
-
-}
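
The deleted Cluster interface above carries over method-for-method to the new RemoteCluster interface added below. As a hedged illustration of the file-transfer and job-lifecycle calls it declares (assuming an already constructed HPCRemoteCluster instance, the gfac modules on the classpath, and a made-up class name):

import java.util.List;

import org.apache.airavata.gfac.core.JobDescriptor;
import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;

public class RemoteClusterLifecycleSketch {
    public static void run(RemoteCluster remoteCluster, JobDescriptor jobDescriptor,
                           String workingDir, String localScript) throws SSHApiException {
        remoteCluster.makeDirectory(workingDir);                     // create the remote working directory
        remoteCluster.scpTo(workingDir, localScript);                // push the run script to the resource
        String jobID = remoteCluster.submitBatchJob(jobDescriptor);  // hand the job to the resource job manager
        List<String> contents = remoteCluster.listDirectory(workingDir);
        System.out.println("Remote dir holds " + contents.size() + " entries; job " + jobID + " submitted");
        remoteCluster.cancelJob(jobID);                              // returns the descriptor of the deleted job
        remoteCluster.disconnect();                                  // close the underlying SSH session
    }
}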

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/RemoteCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/RemoteCluster.java b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/RemoteCluster.java
new file mode 100644
index 0000000..2bd4558
--- /dev/null
+++ b/modules/gfac/gfac-core/src/main/java/org/apache/airavata/gfac/core/cluster/RemoteCluster.java
@@ -0,0 +1,161 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.core.cluster;
+
+import com.jcraft.jsch.Session;
+import org.apache.airavata.gfac.core.JobDescriptor;
+import org.apache.airavata.gfac.core.SSHApiException;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This interface represents a remote cluster machine.
+ * End users of the API can implement this and come up with their own
+ * implementations, but mostly this interface is for internal usage.
+ */
+public interface RemoteCluster {
+
+    /**
+     * This will submit a job to the cluster using a given PBS script file and some parameters
+     *
+     * @param pbsFilePath  path of the PBS script file
+     * @param workingDirectory working directory to which the PBS script has to be copied
+     * @return jobId after successful job submission
+     * @throws SSHApiException throws exception during error
+     */
+    public String submitBatchJobWithScript(String pbsFilePath, String workingDirectory) throws SSHApiException;
+
+    /**
+     * This will submit the given job without performing any monitoring
+     *
+     * @param jobDescriptor  job descriptor to submit to the cluster; this contains all the parameters
+     * @return jobID after successful job submission.
+     * @throws SSHApiException  throws exception during error
+     */
+    public String submitBatchJob(JobDescriptor jobDescriptor) throws SSHApiException;
+
+    /**
+     * This will copy the localFile to the remoteFile location on the configured cluster
+     *
+     * @param remoteFile remote file location; this can be a directory too
+     * @param localFile local path of the file which needs to be copied to the remote location
+     * @throws SSHApiException throws exception during error
+     */
+    public void scpTo(String remoteFile, String localFile) throws SSHApiException;
+
+    /**
+     * This will copy the remote file at remoteFile to the local path localFile
+     * @param remoteFile remote file path; this has to be a fully qualified path
+     * @param localFile the local destination path; this can be a directory too
+     * @throws SSHApiException
+     */
+    public void scpFrom(String remoteFile, String localFile) throws SSHApiException;
+
+    /**
+     * This will copy a file from one remote location to another remote location (third-party transfer)
+     * @param remoteFileSorce source remote file path
+     * @param remoteFileTarget target remote file path
+     * @throws SSHApiException
+     */
+    public void scpThirdParty(String remoteFileSorce, String remoteFileTarget) throws SSHApiException;
+    
+    /**
+     * This will create directories on the computing resource
+     * @param directoryPath the fully qualified path of the directory the user wants to create
+     * @throws SSHApiException throws during error
+     */
+    public void makeDirectory(String directoryPath) throws SSHApiException;
+
+
+    /**
+     * This will get the job description of a job which is present in the cluster
+     * @param jobID jobId of the job whose description is required
+     * @return full job description of the job which was submitted successfully
+     * @throws SSHApiException throws exception during error
+     */
+    public JobDescriptor getJobDescriptorById(String jobID) throws SSHApiException;
+
+    /**
+     * This will cancel the given job and remove it from the queue
+     *
+     * @param jobID  jobId of the job which the user wants to cancel
+     * @return the description of the cancelled job
+     * @throws SSHApiException throws exception during error
+     */
+    public JobDescriptor cancelJob(String jobID) throws SSHApiException;
+
+    /**
+     * This will get the job status of the job associated with this jobId
+     *
+     * @param jobID jobId of the job whose status the user wants to get
+     * @return job status of the given jobID
+     * @throws SSHApiException throws exception during error
+     */
+    public JobStatus getJobStatus(String jobID) throws SSHApiException;
+    /**
+     * This will get the jobId of the job associated with the given job name
+     *
+     * @param jobName name of the job whose jobId the user wants to get
+     * @param userName userName under which the job was submitted
+     * @return jobId of the given jobName
+     * @throws SSHApiException throws exception during error
+     */
+    public String getJobIdByJobName(String jobName, String userName) throws SSHApiException;
+
+    /**
+     * This method can be used to poll the job statuses of the given
+     * user's jobs, but the jobID list should be passed; otherwise we will get unwanted
+     * job statuses submitted by different middleware outside Apache
+     * Airavata under the same username, which we do not consider
+     * @param userName userName of the jobs whose status is required
+     * @param jobIDs precise set of jobIDs
+     */
+    public void getJobStatuses(String userName,Map<String,JobStatus> jobIDs)throws SSHApiException;
+    /**
+     * This will list the content of a directory on the computing resource
+     * @param directoryPath the fully qualified path of the directory the user wants to list
+     * @return the entries of the given directory
+     * @throws SSHApiException throws during error
+     */
+    public List<String> listDirectory(String directoryPath) throws SSHApiException;
+
+    /**
+     * This method can be used to get the created SSH session
+     * so that the session can be reused.
+     * @throws SSHApiException
+     */
+    public Session getSession() throws SSHApiException;
+    
+    /**
+     * This method can be used to close the initialized connections
+     * to handle a graceful shutdown of the system
+     * @throws SSHApiException
+     */
+    public void disconnect() throws SSHApiException;
+
+    /**
+     * This gives the server info
+     * @return the server info of this cluster
+     */
+    public ServerInfo getServerInfo();
+
+}
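
The interface contract is unchanged by the rename; only the type name moves from Cluster to RemoteCluster. A minimal usage sketch against the interface defined above, assuming an already constructed RemoteCluster implementation and a populated JobDescriptor (the wrapper class and method names below are illustrative only, not part of this commit):

import org.apache.airavata.gfac.core.JobDescriptor;
import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.JobStatus;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;

public class RemoteClusterUsageSketch {

    // Submit a job, check its status once, stage one output file back and disconnect.
    public static void runOnce(RemoteCluster remoteCluster, JobDescriptor jobDescriptor,
                               String remoteOutputFile, String localOutputFile) throws SSHApiException {
        String jobId = remoteCluster.submitBatchJob(jobDescriptor); // returns the resource job manager's id
        JobStatus status = remoteCluster.getJobStatus(jobId);       // single poll; real code monitors repeatedly
        System.out.println("Job " + jobId + " status: " + status);
        remoteCluster.scpFrom(remoteOutputFile, localOutputFile);   // copy one result file from the cluster
        remoteCluster.disconnect();                                 // graceful shutdown of the SSH connection
    }
}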

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
index f89cb98..04241c8 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/GSISSHAbstractCluster.java
@@ -36,7 +36,7 @@ import org.apache.airavata.gfac.core.authentication.SSHKeyAuthentication;
 import org.apache.airavata.gfac.core.authentication.SSHPasswordAuthentication;
 import org.apache.airavata.gfac.core.authentication.SSHPublicKeyAuthentication;
 import org.apache.airavata.gfac.core.authentication.SSHPublicKeyFileAuthentication;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.cluster.JobStatus;
 import org.apache.airavata.gfac.core.cluster.OutputParser;
 import org.apache.airavata.gfac.core.cluster.RawCommandInfo;
@@ -69,7 +69,7 @@ import java.security.SecureRandom;
 import java.util.List;
 import java.util.Map;
 
-public class GSISSHAbstractCluster implements Cluster {
+public class GSISSHAbstractCluster implements RemoteCluster {
 
     private static final Logger log = LoggerFactory.getLogger(GSISSHAbstractCluster.class);
     public static final String X509_CERT_DIR = "X509_CERT_DIR";

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
new file mode 100644
index 0000000..cc688e2
--- /dev/null
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/HPCRemoteCluster.java
@@ -0,0 +1,46 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+*/
+package org.apache.airavata.gfac.gsi.ssh.impl;
+
+import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
+import org.apache.airavata.gfac.core.SSHApiException;
+import org.apache.airavata.gfac.core.cluster.ServerInfo;
+import org.apache.airavata.gfac.core.JobManagerConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * This is the default implementation of a remote cluster.
+ * It has most of the methods to be used by the end user of the
+ * library.
+ */
+public class HPCRemoteCluster extends GSISSHAbstractCluster {
+    private static final Logger log = LoggerFactory.getLogger(HPCRemoteCluster.class);
+
+
+    public HPCRemoteCluster(JobManagerConfiguration jobManagerConfiguration) {
+        super(jobManagerConfiguration);
+    }
+    public HPCRemoteCluster(ServerInfo serverInfo, AuthenticationInfo authenticationInfo, JobManagerConfiguration config) throws SSHApiException {
+        super(serverInfo, authenticationInfo,config);
+    }
+}
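
HPCRemoteCluster keeps the same two constructors PBSCluster had, so existing call sites only change the class name. A small sketch of building one from pre-existing configuration objects (the factory class below is illustrative and not part of this commit; the constructor is assumed to establish the SSH connection, since it declares SSHApiException):

import org.apache.airavata.gfac.core.JobManagerConfiguration;
import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.core.cluster.ServerInfo;
import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;

public class HPCRemoteClusterFactorySketch {

    // The caller supplies the server, credential and job manager configuration objects.
    public static HPCRemoteCluster connect(ServerInfo serverInfo,
                                           AuthenticationInfo authenticationInfo,
                                           JobManagerConfiguration jobManagerConfiguration) throws SSHApiException {
        // assumed to open the SSH connection; it declares SSHApiException for connection failures
        return new HPCRemoteCluster(serverInfo, authenticationInfo, jobManagerConfiguration);
    }
}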

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/PBSCluster.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/PBSCluster.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/PBSCluster.java
deleted file mode 100644
index 1d514f8..0000000
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsi/ssh/impl/PBSCluster.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
-*/
-package org.apache.airavata.gfac.gsi.ssh.impl;
-
-import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
-import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.ServerInfo;
-import org.apache.airavata.gfac.core.JobManagerConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * This is the default implementation of a cluster.
- * this has most of the methods to be used by the end user of the
- * library.
- */
-public class PBSCluster extends GSISSHAbstractCluster {
-    private static final Logger log = LoggerFactory.getLogger(PBSCluster.class);
-
-
-    public PBSCluster(JobManagerConfiguration jobManagerConfiguration) {
-        super(jobManagerConfiguration);
-    }
-    public PBSCluster(ServerInfo serverInfo, AuthenticationInfo authenticationInfo, JobManagerConfiguration config) throws SSHApiException {
-        super(serverInfo, authenticationInfo,config);
-    }
-}

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
index eeae272..8e243d3 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHDirectorySetupHandler.java
@@ -21,13 +21,13 @@
 package org.apache.airavata.gfac.gsissh.handler;
 
 import org.apache.airavata.gfac.core.GFacException;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.model.workspace.experiment.*;
 import org.apache.airavata.registry.cpi.ExpCatChildDataType;
 import org.slf4j.Logger;
@@ -62,11 +62,11 @@ public class GSISSHDirectorySetupHandler extends AbstractHandler {
         makeDirectory(jobExecutionContext);
 	}
 	private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
         try {
             String hostAddress = jobExecutionContext.getHostName();
-            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 try {
                     GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 } catch (GFacException e1) {
@@ -78,11 +78,11 @@ public class GSISSHDirectorySetupHandler extends AbstractHandler {
             }
 
             String workingDirectory = jobExecutionContext.getWorkingDir();
-            cluster.makeDirectory(workingDirectory);
+            remoteCluster.makeDirectory(workingDirectory);
             if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-                cluster.makeDirectory(jobExecutionContext.getInputDir());
+                remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
             if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-            	cluster.makeDirectory(jobExecutionContext.getOutputDir());
+            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
             
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
index 958e04c..7b4f350 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHInputHandler.java
@@ -22,6 +22,7 @@ package org.apache.airavata.gfac.gsissh.handler;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.core.GFacException;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
@@ -29,7 +30,6 @@ import org.apache.airavata.gfac.core.handler.GFacHandlerException;
 import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.model.appcatalog.appinterface.DataType;
 import org.apache.airavata.model.appcatalog.appinterface.InputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
@@ -66,7 +66,7 @@ public class GSISSHInputHandler extends AbstractHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
         StringBuffer data = new StringBuffer("|");
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
 
         try {
             String hostAddress = jobExecutionContext.getHostName();
@@ -74,8 +74,8 @@ public class GSISSHInputHandler extends AbstractHandler {
                 GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
             }
 
-            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 throw new GFacException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
@@ -123,7 +123,7 @@ public class GSISSHInputHandler extends AbstractHandler {
                         inputParamType.setValue(oldFiles.get(index));
                         data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
                     } else {
-                        String stageInputFile = stageInputFiles(cluster, jobExecutionContext, paramValue);
+                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
                         inputParamType.setValue(stageInputFile);
                         StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
                         status.setTransferState(TransferState.UPLOAD);
@@ -143,7 +143,7 @@ public class GSISSHInputHandler extends AbstractHandler {
 //                            newFiles.add(oldFiles.get(index));
 //                            data.append(oldFiles.get(index++)).append(",");
 //                        } else {
-//                            String stageInputFiles = stageInputFiles(cluster, jobExecutionContext, paramValueEach);
+//                            String stageInputFiles = stageInputFiles(remoteCluster, jobExecutionContext, paramValueEach);
 //                            status.setTransferState(TransferState.UPLOAD);
 //                            detail.setTransferStatus(status);
 //                            detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
@@ -174,7 +174,7 @@ public class GSISSHInputHandler extends AbstractHandler {
         jobExecutionContext.setInMessageContext(inputNew);
     }
 
-    private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
+    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
@@ -186,7 +186,7 @@ public class GSISSHInputHandler extends AbstractHandler {
             int j = 1;
             while(!success){
             try {
-				cluster.scpTo(targetFile, paramValue);
+				remoteCluster.scpTo(targetFile, paramValue);
 				success = true;
 			} catch (Exception e) {
 				log.info(e.getLocalizedMessage());
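
The staging code in the hunk above retries remoteCluster.scpTo until the transfer succeeds (the rest of the loop falls outside this hunk). A simplified, self-contained sketch of that retry pattern; the attempt limit and back-off interval here are chosen for illustration only:

import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;

public class ScpRetrySketch {

    // Try the transfer up to maxAttempts times (assumed >= 1), sleeping briefly between failures.
    public static void scpToWithRetry(RemoteCluster remoteCluster, String remoteFile,
                                      String localFile, int maxAttempts) throws SSHApiException {
        SSHApiException lastError = null;
        for (int attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                remoteCluster.scpTo(remoteFile, localFile); // remote path first, as in the handler above
                return;                                     // transfer succeeded
            } catch (SSHApiException e) {
                lastError = e;
                try {
                    Thread.sleep(5000);                     // illustrative back-off between attempts
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
        throw lastError;                                    // give up after the final failure
    }
}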

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
index d93c2e3..a11b688 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/GSISSHOutputHandler.java
@@ -31,7 +31,7 @@ import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.impl.OutputUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.model.appcatalog.appinterface.DataType;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
@@ -78,11 +78,11 @@ public class GSISSHOutputHandler extends AbstractHandler {
         DataTransferDetails detail = new DataTransferDetails();
         TransferStatus status = new TransferStatus();
 
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
         
         try {
-            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
                 
                 throw new GFacProviderException("Security context is not set properly");
@@ -132,7 +132,7 @@ public class GSISSHOutputHandler extends AbstractHandler {
                 localStdOutFile = new File(outputDataDir + File.separator + jobExecutionContext.getApplicationName() + ".stdout");
                 while(stdOutStr.isEmpty()){
                 try {
-                	cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                	remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
                 	stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
 				} catch (Exception e) {
 					log.error(e.getLocalizedMessage());
@@ -150,7 +150,7 @@ public class GSISSHOutputHandler extends AbstractHandler {
                 data.append(oldFiles.get(index++)).append(",");
             } else {
                 localStdErrFile = new File(outputDataDir + File.separator + jobExecutionContext.getApplicationName() + ".stderr");
-                cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
+                remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
                 StringBuffer temp = new StringBuffer(data.append(localStdErrFile.getAbsolutePath()).append(",").toString());
                 GFacUtils.saveHandlerData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
             }
@@ -177,7 +177,7 @@ public class GSISSHOutputHandler extends AbstractHandler {
                     List<String> outputList = null;
                     int retry=3;
                     while(retry>0){
-                    	 outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
+                    	 outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
                         if (outputList.size() == 1 && outputList.get(0).isEmpty()) {
                             Thread.sleep(10000);
                         } else if (outputList.size() > 0) {
@@ -203,7 +203,7 @@ public class GSISSHOutputHandler extends AbstractHandler {
                                     localFile = oldFiles.get(index);
                                     data.append(oldFiles.get(index++)).append(",");
                                 } else {
-                                    cluster.scpFrom(downloadFile, outputDataDir);
+                                    remoteCluster.scpFrom(downloadFile, outputDataDir);
                                     String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
                                     localFile = outputDataDir + File.separator + fileName;
                                     StringBuffer temp = new StringBuffer(data.append(localFile).append(",").toString());
@@ -262,7 +262,7 @@ public class GSISSHOutputHandler extends AbstractHandler {
                             outputFile = oldFiles.get(index);
                             data.append(oldFiles.get(index++)).append(",");
                         } else {
-                            cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
+                            remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                             outputFile = outputDataDir + File.separator + valueList;
                             jobExecutionContext.addOutputFile(outputFile);
                             StringBuffer temp = new StringBuffer(data.append(outputFile).append(",").toString());

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
index 4c6d3bd..238d4e3 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/handler/NewGSISSHOutputHandler.java
@@ -12,7 +12,7 @@ import org.apache.airavata.gfac.core.GFacUtils;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gfac.ssh.util.HandleOutputs;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.model.appcatalog.appinterface.OutputDataObjectType;
 import org.apache.airavata.model.workspace.experiment.CorrectiveAction;
 import org.apache.airavata.model.workspace.experiment.ErrorCategory;
@@ -39,11 +39,11 @@ public class NewGSISSHOutputHandler extends AbstractHandler{
 	            log.error(e.getMessage());
 	            throw new GFacHandlerException("Error while creating SSHSecurityContext", e, e.getLocalizedMessage());
 	        }
-	        Cluster cluster = null;
+	        RemoteCluster remoteCluster = null;
 	        
 	        try {
-	            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-	            if (cluster == null) {
+	            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+	            if (remoteCluster == null) {
 	                GFacUtils.saveErrorDetails(jobExecutionContext, "Security context is not set properly", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.FILE_SYSTEM_FAILURE);
 	                
 	                throw new GFacProviderException("Security context is not set properly");
@@ -61,7 +61,7 @@ public class NewGSISSHOutputHandler extends AbstractHandler{
 	        }
 
 	        super.invoke(jobExecutionContext);
-	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, cluster);
+	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
             try {
 				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
 			} catch (RegistryException e) {

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
index a4418fc..69edb7b 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/provider/impl/GSISSHProvider.java
@@ -33,7 +33,7 @@ import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.util.GFACGSISSHUtils;
 import org.apache.airavata.gfac.monitor.email.EmailBasedMonitor;
 import org.apache.airavata.gfac.monitor.email.EmailMonitorFactory;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.SSHApiException;
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
@@ -84,25 +84,25 @@ public class GSISSHProvider extends AbstractProvider {
         ApplicationDeploymentDescription appDeployDesc = jobExecutionContext.getApplicationContext()
                 .getApplicationDeploymentDescription();
         JobDetails jobDetails = new JobDetails();
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
 
         try {
             if (jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName()) != null) {
-                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName())).getPbsCluster();
+                remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(jobExecutionContext.getHostName())).getRemoteCluster();
             }
-            if (cluster == null) {
+            if (remoteCluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
             }
             // This installed path is a mandetory field, because this could change based on the computing resource
-            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, cluster);
+            JobDescriptor jobDescriptor = GFACGSISSHUtils.createJobDescriptor(jobExecutionContext, remoteCluster);
             jobDetails.setJobName(jobDescriptor.getJobName());
 
             log.info(jobDescriptor.toXML());
             data.append("jobDesc=").append(jobDescriptor.toXML());
             jobDetails.setJobDescription(jobDescriptor.toXML());
-            String jobID = cluster.submitBatchJob(jobDescriptor);
+            String jobID = remoteCluster.submitBatchJob(jobDescriptor);
             jobExecutionContext.setJobDetails(jobDetails);
             if (jobID == null) {
                 jobDetails.setJobID("none");
@@ -179,12 +179,12 @@ public class GSISSHProvider extends AbstractProvider {
         JobDetails jobDetails = jobExecutionContext.getJobDetails();
         String hostName = jobExecutionContext.getHostName();
         try {
-            Cluster cluster = null;
+            RemoteCluster remoteCluster = null;
             if (jobExecutionContext.getSecurityContext(hostName) == null) {
                 GFACGSISSHUtils.addSecurityContext(jobExecutionContext);
             }
-            cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostName)).getPbsCluster();
-            if (cluster == null) {
+            remoteCluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(hostName)).getRemoteCluster();
+            if (remoteCluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
@@ -196,7 +196,7 @@ public class GSISSHProvider extends AbstractProvider {
             }
             if (jobDetails.getJobID() != null) {
                 // if this operation success without any exceptions, we can assume cancel operation succeeded.
-                cluster.cancelJob(jobDetails.getJobID());
+                remoteCluster.cancelJob(jobDetails.getJobID());
             } else {
                 log.error("No Job Id is set, so cannot perform the cancel operation !!!");
                 return false;

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
index 7059475..dcdb3b0 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/security/GSISecurityContext.java
@@ -23,7 +23,7 @@ package org.apache.airavata.gfac.gsissh.security;
 import org.apache.airavata.credential.store.store.CredentialReader;
 import org.apache.airavata.gfac.core.AbstractSecurityContext;
 import org.apache.airavata.gfac.core.RequestData;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,12 +37,12 @@ public class GSISecurityContext extends AbstractSecurityContext {
      * context name
      */
 
-    private Cluster pbsCluster = null;
+    private RemoteCluster remoteCluster = null;
 
 
-    public GSISecurityContext(CredentialReader credentialReader, RequestData requestData, Cluster pbsCluster) {
+    public GSISecurityContext(CredentialReader credentialReader, RequestData requestData, RemoteCluster remoteCluster) {
         super(credentialReader, requestData);
-        this.pbsCluster = pbsCluster;
+        this.remoteCluster = remoteCluster;
     }
 
 
@@ -51,17 +51,17 @@ public class GSISecurityContext extends AbstractSecurityContext {
     }
 
 
-    public GSISecurityContext(Cluster pbsCluster) {
-        this.setPbsCluster(pbsCluster);
+    public GSISecurityContext(RemoteCluster remoteCluster) {
+        this.setRemoteCluster(remoteCluster);
     }
 
 
 
-    public Cluster getPbsCluster() {
-        return pbsCluster;
+    public RemoteCluster getRemoteCluster() {
+        return remoteCluster;
     }
 
-    public void setPbsCluster(Cluster pbsCluster) {
-        this.pbsCluster = pbsCluster;
+    public void setRemoteCluster(RemoteCluster remoteCluster) {
+        this.remoteCluster = remoteCluster;
     }
 }
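
Every handler and provider touched by this patch follows the same retrieval pattern against the renamed accessor: look up the security context for the job's host, cast it, and fail fast when no RemoteCluster was attached. A condensed sketch of that pattern (the helper class is illustrative; the real handlers also record ErrorDetails before throwing):

import org.apache.airavata.gfac.core.GFacException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.context.JobExecutionContext;
import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;

public class SecurityContextLookupSketch {

    // Resolve the RemoteCluster stored in the GSI security context for the job's host.
    public static RemoteCluster lookup(JobExecutionContext jobExecutionContext) throws GFacException {
        String hostAddress = jobExecutionContext.getHostName();
        GSISecurityContext securityContext =
                (GSISecurityContext) jobExecutionContext.getSecurityContext(hostAddress);
        RemoteCluster remoteCluster = securityContext.getRemoteCluster(); // was getPbsCluster() before this commit
        if (remoteCluster == null) {
            throw new GFacException("Security context is not set properly");
        }
        return remoteCluster;
    }
}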

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
index aa0b60e..6a4a23e 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/gsissh/util/GFACGSISSHUtils.java
@@ -20,6 +20,8 @@
 */
 package org.apache.airavata.gfac.gsissh.util;
 
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.registry.cpi.AppCatalog;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.common.utils.ServerSettings;
@@ -30,10 +32,8 @@ import org.apache.airavata.gfac.core.RequestData;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.GFacUtils;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.gsissh.security.TokenizedMyProxyAuthInfo;
-import org.apache.airavata.gfac.core.cluster.Cluster;
 import org.apache.airavata.gfac.core.cluster.ServerInfo;
 import org.apache.airavata.gfac.core.JobDescriptor;
 import org.apache.airavata.gfac.core.JobManagerConfiguration;
@@ -64,7 +64,7 @@ public class GFACGSISSHUtils {
     public static final String LSF_JOB_MANAGER = "lsf";
 
     public static int maxClusterCount = 5;
-    public static Map<String, List<Cluster>> clusters = new HashMap<String, List<Cluster>>();
+    public static Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();
 
     public static void addSecurityContext(JobExecutionContext jobExecutionContext) throws GFacException, ApplicationSettingsException {
         JobSubmissionInterface jobSubmissionInterface = jobExecutionContext.getPreferredJobSubmissionInterface();
@@ -79,7 +79,7 @@ public class GFACGSISSHUtils {
                 String credentialStoreToken = jobExecutionContext.getCredentialStoreToken(); // this is set by the framework
                 RequestData requestData = new RequestData(jobExecutionContext.getGatewayID());
                 requestData.setTokenId(credentialStoreToken);
-                PBSCluster pbsCluster = null;
+                HPCRemoteCluster HPCRemoteCluster = null;
                 GSISecurityContext context = null;
 
                 TokenizedMyProxyAuthInfo tokenizedMyProxyAuthInfo = new TokenizedMyProxyAuthInfo(requestData);
@@ -103,22 +103,22 @@ public class GFACGSISSHUtils {
                     } else if (clusters.containsKey(key)) {
                         int i = new Random().nextInt(Integer.MAX_VALUE) % maxClusterCount;
                         if (clusters.get(key).get(i).getSession().isConnected()) {
-                            pbsCluster = (PBSCluster) clusters.get(key).get(i);
+                            HPCRemoteCluster = (HPCRemoteCluster) clusters.get(key).get(i);
                         } else {
                             clusters.get(key).remove(i);
                             recreate = true;
                         }
                         if (!recreate) {
                             try {
-                                pbsCluster.listDirectory("~/"); // its hard to trust isConnected method, so we try to connect if it works we are good,else we recreate
+                                HPCRemoteCluster.listDirectory("~/"); // it's hard to trust the isConnected method, so we run a cheap command; if it works we are good, else we recreate
                             } catch (Exception e) {
                                 clusters.get(key).remove(i);
                                 logger.info("Connection found the connection map is expired, so we create from the scratch");
                                 maxClusterCount++;
-                                recreate = true; // we make the pbsCluster to create again if there is any exception druing connection
+                                recreate = true; // we recreate the HPCRemoteCluster if there is any exception during connection
                             }
                             logger.info("Re-using the same connection used with the connection string:" + key);
-                            context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, pbsCluster);
+                            context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, HPCRemoteCluster);
                         }
                     } else {
                         recreate = true;
@@ -145,16 +145,16 @@ public class GFACGSISSHUtils {
                                 jConfig = CommonUtils.getLSFJobManager(installedParentPath);
                             }
                         }
-                        pbsCluster = new PBSCluster(serverInfo, tokenizedMyProxyAuthInfo, jConfig);
-                        context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, pbsCluster);
-                        List<Cluster> pbsClusters = null;
+                        HPCRemoteCluster = new HPCRemoteCluster(serverInfo, tokenizedMyProxyAuthInfo, jConfig);
+                        context = new GSISecurityContext(tokenizedMyProxyAuthInfo.getCredentialReader(), requestData, HPCRemoteCluster);
+                        List<RemoteCluster> pbsRemoteClusters = null;
                         if (!(clusters.containsKey(key))) {
-                            pbsClusters = new ArrayList<Cluster>();
+                            pbsRemoteClusters = new ArrayList<RemoteCluster>();
                         } else {
-                            pbsClusters = clusters.get(key);
+                            pbsRemoteClusters = clusters.get(key);
                         }
-                        pbsClusters.add(pbsCluster);
-                        clusters.put(key, pbsClusters);
+                        pbsRemoteClusters.add(HPCRemoteCluster);
+                        clusters.put(key, pbsRemoteClusters);
                     }
                 }
 
@@ -165,7 +165,7 @@ public class GFACGSISSHUtils {
         }
     }
 
-    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, Cluster cluster) {
+    public static JobDescriptor createJobDescriptor(JobExecutionContext jobExecutionContext, RemoteCluster remoteCluster) {
         JobDescriptor jobDescriptor = new JobDescriptor();
         TaskDetails taskData = jobExecutionContext.getTaskData();
         ResourceJobManager resourceJobManager = jobExecutionContext.getResourceJobManager();
@@ -274,10 +274,10 @@ public class GFACGSISSHUtils {
         }
         jobDescriptor.setInputValues(inputValues);
 
-        jobDescriptor.setUserName(((GSISSHAbstractCluster) cluster).getServerInfo().getUserName());
+        jobDescriptor.setUserName(((GSISSHAbstractCluster) remoteCluster).getServerInfo().getUserName());
         jobDescriptor.setShellName("/bin/bash");
         jobDescriptor.setAllEnvExport(true);
-        jobDescriptor.setOwner(((PBSCluster) cluster).getServerInfo().getUserName());
+        jobDescriptor.setOwner(((HPCRemoteCluster) remoteCluster).getServerInfo().getUserName());
 
         ComputationalResourceScheduling taskScheduling = taskData.getTaskScheduling();
         if (taskScheduling != null) {

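The hunk above also preserves the per-connection cache: RemoteCluster instances are pooled in a map keyed by user, host and port, a cached entry is verified with a cheap listDirectory("~/") probe, and a stale entry is dropped so the caller recreates it. A stripped-down sketch of that reuse idea, independent of the GSI-specific wiring (the class name, synchronization and first-entry pick below are illustrative simplifications):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.airavata.gfac.core.cluster.RemoteCluster;

public class RemoteClusterCacheSketch {

    private final Map<String, List<RemoteCluster>> clusters = new HashMap<String, List<RemoteCluster>>();

    // Return a live cached connection for this user/host/port, or null if the caller must create one.
    public synchronized RemoteCluster acquire(String userName, String hostName, int port) {
        String key = userName + hostName + port;    // same key shape as the handlers in this patch
        List<RemoteCluster> pool = clusters.get(key);
        if (pool == null || pool.isEmpty()) {
            return null;
        }
        RemoteCluster candidate = pool.get(0);      // the patch picks a random entry; the first is enough here
        try {
            candidate.listDirectory("~/");          // cheap liveness probe, as in the code above
            return candidate;
        } catch (Exception e) {
            pool.remove(candidate);                 // stale connection; force the caller to recreate it
            return null;
        }
    }

    // Register a freshly created connection so later requests can reuse it.
    public synchronized void register(String userName, String hostName, int port, RemoteCluster remoteCluster) {
        String key = userName + hostName + port;
        List<RemoteCluster> pool = clusters.get(key);
        if (pool == null) {
            pool = new ArrayList<RemoteCluster>();
            clusters.put(key, pool);
        }
        pool.add(remoteCluster);
    }
}
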
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
index 22c3a14..69119d2 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/HPCMonitorID.java
@@ -63,13 +63,13 @@ public class HPCMonitorID extends MonitorID {
                 ServerInfo serverInfo = null;
                 if (securityContext != null) {
                     if (securityContext instanceof  GSISecurityContext){
-                        serverInfo = (((GSISecurityContext) securityContext).getPbsCluster()).getServerInfo();
+                        serverInfo = (((GSISecurityContext) securityContext).getRemoteCluster()).getServerInfo();
                         if (serverInfo.getUserName() != null) {
                             setUserName(serverInfo.getUserName());
                         }
                     }
                     if (securityContext instanceof SSHSecurityContext){
-                        serverInfo = (((SSHSecurityContext) securityContext).getPbsCluster()).getServerInfo();
+                        serverInfo = (((SSHSecurityContext) securityContext).getRemoteCluster()).getServerInfo();
                         if (serverInfo.getUserName() != null) {
                             setUserName(serverInfo.getUserName());
                         }

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
index 7429381..f388de5 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/monitor/impl/pull/qstat/ResourceConnection.java
@@ -24,7 +24,7 @@ import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.SecurityContext;
 import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
 import org.apache.airavata.gfac.core.monitor.MonitorID;
-import org.apache.airavata.gfac.gsi.ssh.impl.PBSCluster;
+import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;
 import org.apache.airavata.gfac.gsissh.security.GSISecurityContext;
 import org.apache.airavata.gfac.monitor.HostMonitorData;
 import org.apache.airavata.gfac.ssh.security.SSHSecurityContext;
@@ -42,7 +42,7 @@ import java.util.TreeMap;
 public class ResourceConnection {
     private static final Logger log = LoggerFactory.getLogger(ResourceConnection.class);
 
-    private PBSCluster cluster;
+    private HPCRemoteCluster cluster;
 
     private AuthenticationInfo authenticationInfo;
 
@@ -54,11 +54,11 @@ public class ResourceConnection {
             if(securityContext != null) {
                 if (securityContext instanceof GSISecurityContext) {
                     GSISecurityContext gsiSecurityContext = (GSISecurityContext) securityContext;
-                    cluster = (PBSCluster) gsiSecurityContext.getPbsCluster();
+                    cluster = (HPCRemoteCluster) gsiSecurityContext.getRemoteCluster();
                 } else if (securityContext instanceof  SSHSecurityContext) {
                     SSHSecurityContext sshSecurityContext = (SSHSecurityContext)
                             securityContext;
-                    cluster = (PBSCluster) sshSecurityContext.getPbsCluster();
+                    cluster = (HPCRemoteCluster) sshSecurityContext.getRemoteCluster();
                 }
             }
             // we just use cluster configuration from the incoming request and construct a new cluster because for monitoring
@@ -74,11 +74,11 @@ public class ResourceConnection {
         try {
             GSISecurityContext securityContext = (GSISecurityContext)
                     monitorID.getJobExecutionContext().getSecurityContext(monitorID.getComputeResourceDescription().getHostName());
-            cluster = (PBSCluster) securityContext.getPbsCluster();
+            cluster = (HPCRemoteCluster) securityContext.getRemoteCluster();
 
             // we just use cluster configuration from the incoming request and construct a new cluster because for monitoring
             // we are using our own credentials and not using one users account to do everything.
-            cluster = new PBSCluster(cluster.getServerInfo(), authenticationInfo, cluster.getJobManagerConfiguration());
+            cluster = new HPCRemoteCluster(cluster.getServerInfo(), authenticationInfo, cluster.getJobManagerConfiguration());
         } catch (GFacException e) {
             log.error("Error reading data from job ExecutionContext");
         }
@@ -140,11 +140,11 @@ public class ResourceConnection {
         return JobState.UNKNOWN;
     }
 
-    public PBSCluster getCluster() {
+    public HPCRemoteCluster getCluster() {
         return cluster;
     }
 
-    public void setCluster(PBSCluster cluster) {
+    public void setCluster(HPCRemoteCluster cluster) {
         this.cluster = cluster;
     }
 

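ResourceConnection deliberately rebuilds the cluster object used for monitoring with the monitoring credentials rather than the user's, reusing only the server and job manager configuration of the job's cluster. A short sketch of that idea (the helper class and method name are illustrative):

import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.authentication.AuthenticationInfo;
import org.apache.airavata.gfac.gsi.ssh.impl.HPCRemoteCluster;

public class MonitoringClusterSketch {

    // Connect to the same host as the job's cluster, but authenticate with the
    // monitoring credentials instead of the user's credentials.
    public static HPCRemoteCluster forMonitoring(HPCRemoteCluster jobCluster,
                                                 AuthenticationInfo monitoringAuth) throws SSHApiException {
        return new HPCRemoteCluster(jobCluster.getServerInfo(), monitoringAuth,
                jobCluster.getJobManagerConfiguration());
    }
}
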
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
index d19ccb5..ec350a5 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPInputHandler.java
@@ -22,7 +22,7 @@ package org.apache.airavata.gfac.ssh.handler;
 
 import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
@@ -93,7 +93,7 @@ public class AdvancedSCPInputHandler extends AbstractHandler {
         List<String> oldFiles = new ArrayList<String>();
         MessageContext inputNew = new MessageContext();
         StringBuffer data = new StringBuffer("|");
-        Cluster pbsCluster = null;
+        RemoteCluster remoteCluster = null;
 
         try {
             String pluginData = GFacUtils.getHandlerData(jobExecutionContext, this.getClass().getName());
@@ -146,12 +146,12 @@ public class AdvancedSCPInputHandler extends AbstractHandler {
                         URL file = new URL(paramValue);
                         String key = file.getUserInfo() + file.getHost() + DEFAULT_SSH_PORT;
                         GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, file.getUserInfo(), file.getHost(), DEFAULT_SSH_PORT);
-                        pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getPbsCluster();
+                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
                         paramValue = file.getPath();
                     } catch (MalformedURLException e) {
                         String key = this.userName + this.hostName + DEFAULT_SSH_PORT;
                         GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-                        pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getPbsCluster();
+                        remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
                         log.error(e.getLocalizedMessage(), e);
                     }
 
@@ -160,7 +160,7 @@ public class AdvancedSCPInputHandler extends AbstractHandler {
                         inputParamType.setValue(oldFiles.get(index));
                         data.append(oldFiles.get(index++)).append(","); // we get already transfered file and increment the index
                     } else {
-                        String stageInputFile = stageInputFiles(pbsCluster, paramValue, parentPath);
+                        String stageInputFile = stageInputFiles(remoteCluster, paramValue, parentPath);
                         inputParamType.setValue(stageInputFile);
                         StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
                         status.setTransferState(TransferState.UPLOAD);
@@ -189,7 +189,7 @@ public class AdvancedSCPInputHandler extends AbstractHandler {
 //                            newFiles.add(oldFiles.get(index));
 //                            data.append(oldFiles.get(index++)).append(",");
 //                        } else {
-//                            String stageInputFiles = stageInputFiles(pbsCluster, paramValueEach, parentPath);
+//                            String stageInputFiles = stageInputFiles(remoteCluster, paramValueEach, parentPath);
 //                            StringBuffer temp = new StringBuffer(data.append(stageInputFiles).append(",").toString());
 //                            GFacUtils.savePluginData(jobExecutionContext, temp.insert(0, ++index), this.getClass().getName());
 //                            newFiles.add(stageInputFiles);
@@ -217,9 +217,9 @@ public class AdvancedSCPInputHandler extends AbstractHandler {
         this.invoke(jobExecutionContext);
     }
 
-    private String stageInputFiles(Cluster cluster, String paramValue, String parentPath) throws GFacException {
+    private String stageInputFiles(RemoteCluster remoteCluster, String paramValue, String parentPath) throws GFacException {
         try {
-            cluster.scpFrom(paramValue, parentPath);
+            remoteCluster.scpFrom(paramValue, parentPath);
             return "file://" + parentPath + File.separator + (new File(paramValue)).getName();
         } catch (SSHApiException e) {
             log.error("Error tranfering remote file to local file, remote path: " + paramValue);

http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
index faef464..5dc2224 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/AdvancedSCPOutputHandler.java
@@ -23,7 +23,7 @@ package org.apache.airavata.gfac.ssh.handler;
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.core.GFacException;
 import org.apache.airavata.gfac.core.SSHApiException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
@@ -94,7 +94,7 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
 
     @Override
     public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-    	Cluster pbsCluster = null;
+    	RemoteCluster remoteCluster = null;
         AuthenticationInfo authenticationInfo = null;
         if (password != null) {
             authenticationInfo = new DefaultPasswordAuthenticationInfo(this.password);
@@ -134,14 +134,14 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                 }
             }
             String key = GFACSSHUtils.prepareSecurityContext(jobExecutionContext, authenticationInfo, this.userName, this.hostName, DEFAULT_SSH_PORT);
-            pbsCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getPbsCluster();
+            remoteCluster = ((SSHSecurityContext)jobExecutionContext.getSecurityContext(key)).getRemoteCluster();
             if(jobExecutionContext.getTaskData().getAdvancedOutputDataHandling() != null && !jobExecutionContext.getTaskData().getAdvancedOutputDataHandling().isPersistOutputData()){
             outputPath = outputPath + File.separator + jobExecutionContext.getExperimentID() + "-" + jobExecutionContext.getTaskData().getTaskID()
                     + File.separator;
-                pbsCluster.makeDirectory(outputPath);
+                remoteCluster.makeDirectory(outputPath);
             }
-            pbsCluster.scpTo(outputPath, standardError);
-            pbsCluster.scpTo(outputPath, standardOutput);
+            remoteCluster.scpTo(outputPath, standardError);
+            remoteCluster.scpTo(outputPath, standardOutput);
             List<OutputDataObjectType> outputArray = new ArrayList<OutputDataObjectType>();
             Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
             Set<String> keys = output.keySet();
@@ -157,7 +157,7 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                         GFacUtils.saveErrorDetails(jobExecutionContext, "Empty Output returned from the application", CorrectiveAction.CONTACT_SUPPORT, ErrorCategory.AIRAVATA_INTERNAL_ERROR);
                 		throw new GFacHandlerException("Empty Output returned from the application.." );
                 	}
-                	pbsCluster.scpTo(outputPath, downloadFile);
+                	remoteCluster.scpTo(outputPath, downloadFile);
                     String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar)+1, downloadFile.length());
                     OutputDataObjectType dataObjectType = new OutputDataObjectType();
                     dataObjectType.setValue(outputPath + File.separatorChar + fileName);
@@ -169,7 +169,7 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                     dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
                     outputArray.add(dataObjectType);
                 }else if (outputDataObjectType.getType() == DataType.STDOUT) {
-                    pbsCluster.scpTo(outputPath, standardOutput);
+                    remoteCluster.scpTo(outputPath, standardOutput);
                     String fileName = standardOutput.substring(standardOutput.lastIndexOf(File.separatorChar)+1, standardOutput.length());
                     OutputDataObjectType dataObjectType = new OutputDataObjectType();
                     dataObjectType.setValue(outputPath + File.separatorChar + fileName);
@@ -181,7 +181,7 @@ public class AdvancedSCPOutputHandler extends AbstractHandler {
                     dataObjectType.setSearchQuery(outputDataObjectType.getSearchQuery());
                     outputArray.add(dataObjectType);
                 }else if (outputDataObjectType.getType() == DataType.STDERR) {
-                    pbsCluster.scpTo(outputPath, standardError);
+                    remoteCluster.scpTo(outputPath, standardError);
                     String fileName = standardError.substring(standardError.lastIndexOf(File.separatorChar)+1, standardError.length());
                     OutputDataObjectType dataObjectType = new OutputDataObjectType();
                     dataObjectType.setValue(outputPath + File.separatorChar + fileName);

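The push side mirrors this: the handler creates the per-experiment output directory when output data is not persisted in place, then copies stderr, stdout and each URI-typed output to it. A condensed sketch, assuming (as with scpFrom above) that makeDirectory and scpTo declare SSHApiException; all path parameters are illustrative.

import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;

// Condensed sketch of the push-side staging above: create the per-run output
// directory, then copy stderr, stdout and a URI-typed output file to it.
final class OutputPushSketch {

    static void pushOutputs(RemoteCluster remoteCluster, String outputPath,
                            String standardError, String standardOutput,
                            String downloadFile) throws SSHApiException {
        remoteCluster.makeDirectory(outputPath);         // e.g. <base>/<experimentId>-<taskId>/
        remoteCluster.scpTo(outputPath, standardError);  // scpTo(remoteTarget, localSource), as used above
        remoteCluster.scpTo(outputPath, standardOutput);
        remoteCluster.scpTo(outputPath, downloadFile);   // one URI-typed output value
    }
}
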
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
index 6c2a198..0189ca3 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/NewSSHOutputHandler.java
@@ -6,7 +6,7 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
@@ -29,14 +29,14 @@ public class NewSSHOutputHandler extends AbstractHandler{
 
 	    public void invoke(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
 	        String hostAddress = jobExecutionContext.getHostName();
-	      	Cluster cluster = null;
+	      	RemoteCluster remoteCluster = null;
 	      	// Security Context and connection
 	        try {
 	            if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
 	                GFACSSHUtils.addSecurityContext(jobExecutionContext);
 	            }
-	            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-	            if (cluster == null) {
+	            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+	            if (remoteCluster == null) {
 	                throw new GFacProviderException("Security context is not set properly");
 	            } else {
 	                log.info("Successfully retrieved the Security Context");
@@ -54,7 +54,7 @@ public class NewSSHOutputHandler extends AbstractHandler{
 	        }
 
 	        super.invoke(jobExecutionContext);
-	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, cluster);
+	        List<OutputDataObjectType> outputArray =  HandleOutputs.handleOutputs(jobExecutionContext, remoteCluster);
 	        try {
 				experimentCatalog.add(ExpCatChildDataType.EXPERIMENT_OUTPUT, outputArray, jobExecutionContext.getExperimentID());
 			} catch (RegistryException e) {

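Every handler touched by this rename resolves the cluster the same way. The shared sequence, shown here exactly as it sits inside the handlers' try blocks (imports, logging and exception handling as in NewSSHOutputHandler above):

// Shared sequence inside each handler's try block: resolve the RemoteCluster
// for the target host, creating the security context on first use.
String hostAddress = jobExecutionContext.getHostName();
if (jobExecutionContext.getSecurityContext(hostAddress) == null) {
    GFACSSHUtils.addSecurityContext(jobExecutionContext);
}
RemoteCluster remoteCluster =
        ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
if (remoteCluster == null) {
    throw new GFacProviderException("Security context is not set properly");
}
log.info("Successfully retrieved the Security Context");
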
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
index e79903f..3b49f63 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHDirectorySetupHandler.java
@@ -21,7 +21,7 @@
 package org.apache.airavata.gfac.ssh.handler;
 
 import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
@@ -70,21 +70,21 @@ public class SSHDirectorySetupHandler extends AbstractHandler {
     }
 
     private void makeDirectory(JobExecutionContext jobExecutionContext) throws GFacHandlerException {
-		Cluster cluster = null;
+		RemoteCluster remoteCluster = null;
 		try{
             String hostAddress = jobExecutionContext.getHostName();
-            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-        if (cluster == null) {
+            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+        if (remoteCluster == null) {
             throw new GFacHandlerException("Security context is not set properly");
         } else {
             log.info("Successfully retrieved the Security Context");
         }
             String workingDirectory = jobExecutionContext.getWorkingDir();
-            cluster.makeDirectory(workingDirectory);
+            remoteCluster.makeDirectory(workingDirectory);
             if(!jobExecutionContext.getInputDir().equals(workingDirectory))
-            	cluster.makeDirectory(jobExecutionContext.getInputDir());
+            	remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
             if(!jobExecutionContext.getOutputDir().equals(workingDirectory))
-            	cluster.makeDirectory(jobExecutionContext.getOutputDir());
+            	remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
             
             DataTransferDetails detail = new DataTransferDetails();
             TransferStatus status = new TransferStatus();

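The directory setup reduces to creating the working directory and then the input and output directories only when they differ from it. A compact sketch of that logic with the parameters made explicit, assuming makeDirectory declares SSHApiException as scpFrom does above; the wrapper class is illustrative.

import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.context.JobExecutionContext;

// Compact sketch of SSHDirectorySetupHandler.makeDirectory() above: create the
// working directory, plus the input/output directories when they differ from it.
final class DirectorySetupSketch {

    static void setupDirectories(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext)
            throws SSHApiException {
        String workingDirectory = jobExecutionContext.getWorkingDir();
        remoteCluster.makeDirectory(workingDirectory);
        if (!jobExecutionContext.getInputDir().equals(workingDirectory)) {
            remoteCluster.makeDirectory(jobExecutionContext.getInputDir());
        }
        if (!jobExecutionContext.getOutputDir().equals(workingDirectory)) {
            remoteCluster.makeDirectory(jobExecutionContext.getOutputDir());
        }
    }
}
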
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
index f907cc6..53bc2ca 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHInputHandler.java
@@ -22,7 +22,7 @@ package org.apache.airavata.gfac.ssh.handler;
 
 import org.apache.airavata.common.exception.ApplicationSettingsException;
 import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.context.MessageContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
@@ -60,7 +60,7 @@ public class SSHInputHandler extends AbstractHandler {
         List<String> oldFiles = new ArrayList<String>();
         StringBuffer data = new StringBuffer("|");
         MessageContext inputNew = new MessageContext();
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
         
         try {
             String hostAddress = jobExecutionContext.getHostName();
@@ -80,8 +80,8 @@ public class SSHInputHandler extends AbstractHandler {
                 }
             }
 
-            cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+            remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 throw new GFacException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
@@ -102,7 +102,7 @@ public class SSHInputHandler extends AbstractHandler {
                         inputParamType.setValue(oldFiles.get(index));
                        data.append(oldFiles.get(index++)).append(","); // reuse the already transferred file and increment the index
                     } else {
-                        String stageInputFile = stageInputFiles(cluster, jobExecutionContext, paramValue);
+                        String stageInputFile = stageInputFiles(remoteCluster, jobExecutionContext, paramValue);
                         inputParamType.setValue(stageInputFile);
                         StringBuffer temp = new StringBuffer(data.append(stageInputFile).append(",").toString());
                         status.setTransferState(TransferState.UPLOAD);
@@ -122,7 +122,7 @@ public class SSHInputHandler extends AbstractHandler {
 //                	List<String> split = Arrays.asList(StringUtil.getElementsFromString(paramValue));
 //                    List<String> newFiles = new ArrayList<String>();
 //                    for (String paramValueEach : split) {
-//                        String stageInputFiles = stageInputFiles(cluster,jobExecutionContext, paramValueEach);
+//                        String stageInputFiles = stageInputFiles(remoteCluster,jobExecutionContext, paramValueEach);
 //                        status.setTransferState(TransferState.UPLOAD);
 //                        detail.setTransferStatus(status);
 //                        detail.setTransferDescription("Input Data Staged: " + stageInputFiles);
@@ -158,14 +158,14 @@ public class SSHInputHandler extends AbstractHandler {
         // TODO: Auto generated method body.
     }
 
-    private static String stageInputFiles(Cluster cluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
+    private static String stageInputFiles(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext, String paramValue) throws IOException, GFacException {
         int i = paramValue.lastIndexOf(File.separator);
         String substring = paramValue.substring(i + 1);
         try {
             String targetFile = jobExecutionContext.getInputDir() + File.separator + substring;
             if(paramValue.startsWith("scp:")){
             	paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
-            	cluster.scpThirdParty(paramValue, targetFile);
+            	remoteCluster.scpThirdParty(paramValue, targetFile);
             }else{
             if(paramValue.startsWith("file")){
                 paramValue = paramValue.substring(paramValue.indexOf(":") + 1, paramValue.length());
@@ -174,7 +174,7 @@ public class SSHInputHandler extends AbstractHandler {
             int j = 1;
             while(!success){
             try {
-				cluster.scpTo(targetFile, paramValue);
+				remoteCluster.scpTo(targetFile, paramValue);
 				success = true;
 			} catch (Exception e) {
 				log.info(e.getLocalizedMessage());

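The input handler dispatches on the source URI: "scp:"-prefixed values go through a third-party transfer, anything else (optionally "file:"-prefixed) is pushed with scpTo and retried before failing. A condensed sketch of that dispatch; the helper class and retry limit are illustrative, and scpTo/scpThirdParty are assumed to declare SSHApiException like scpFrom above.

import java.io.File;

import org.apache.airavata.gfac.core.SSHApiException;
import org.apache.airavata.gfac.core.cluster.RemoteCluster;
import org.apache.airavata.gfac.core.context.JobExecutionContext;

// Condensed sketch of the input dispatch above: "scp:" sources use a
// third-party transfer, everything else is pushed with scpTo and retried.
final class InputDispatchSketch {

    static String stageInput(RemoteCluster remoteCluster, JobExecutionContext jobExecutionContext,
                             String paramValue) throws SSHApiException {
        String fileName = paramValue.substring(paramValue.lastIndexOf(File.separator) + 1);
        String targetFile = jobExecutionContext.getInputDir() + File.separator + fileName;
        if (paramValue.startsWith("scp:")) {
            remoteCluster.scpThirdParty(paramValue.substring(paramValue.indexOf(":") + 1), targetFile);
        } else {
            if (paramValue.startsWith("file")) {
                paramValue = paramValue.substring(paramValue.indexOf(":") + 1);
            }
            for (int attempt = 1; ; attempt++) {
                try {
                    remoteCluster.scpTo(targetFile, paramValue);
                    break;
                } catch (SSHApiException e) {
                    if (attempt >= 3) {   // illustrative retry limit; the handler logs and retries
                        throw e;
                    }
                }
            }
        }
        return targetFile;
    }
}
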
http://git-wip-us.apache.org/repos/asf/airavata/blob/bad2c1c0/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
----------------------------------------------------------------------
diff --git a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
index 3f9edae..a131557 100644
--- a/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
+++ b/modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/ssh/handler/SSHOutputHandler.java
@@ -23,7 +23,7 @@ package org.apache.airavata.gfac.ssh.handler;
 import org.apache.airavata.common.utils.ServerSettings;
 import org.apache.airavata.gfac.core.Constants;
 import org.apache.airavata.gfac.core.GFacException;
-import org.apache.airavata.gfac.core.cluster.Cluster;
+import org.apache.airavata.gfac.core.cluster.RemoteCluster;
 import org.apache.airavata.gfac.core.context.JobExecutionContext;
 import org.apache.airavata.gfac.core.handler.AbstractHandler;
 import org.apache.airavata.gfac.core.handler.GFacHandlerException;
@@ -79,10 +79,10 @@ public class SSHOutputHandler extends AbstractHandler {
         detail.setTransferDescription("Output data staging");
         TransferStatus status = new TransferStatus();
 
-        Cluster cluster = null;
+        RemoteCluster remoteCluster = null;
         try {
-             cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getPbsCluster();
-            if (cluster == null) {
+             remoteCluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(hostAddress)).getRemoteCluster();
+            if (remoteCluster == null) {
                 throw new GFacProviderException("Security context is not set properly");
             } else {
                 log.info("Successfully retrieved the Security Context");
@@ -108,12 +108,12 @@ public class SSHOutputHandler extends AbstractHandler {
 
             localStdOutFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stdout");
             localStdErrFile = new File(outputDataDir + File.separator + timeStampedExperimentID + "stderr");
-//            cluster.makeDirectory(outputDataDir);
+//            remoteCluster.makeDirectory(outputDataDir);
             int i = 0;
             String stdOutStr = "";
             while (stdOutStr.isEmpty()) {
                 try {
-                    cluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
+                    remoteCluster.scpFrom(jobExecutionContext.getStandardOutput(), localStdOutFile.getAbsolutePath());
                     stdOutStr = GFacUtils.readFileToString(localStdOutFile.getAbsolutePath());
                 } catch (Exception e) {
                     log.error(e.getLocalizedMessage());
@@ -123,7 +123,7 @@ public class SSHOutputHandler extends AbstractHandler {
                 if (i == 3) break;
             }
             Thread.sleep(1000);
-            cluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
+            remoteCluster.scpFrom(jobExecutionContext.getStandardError(), localStdErrFile.getAbsolutePath());
             Thread.sleep(1000);
 
             String stdErrStr = GFacUtils.readFileToString(localStdErrFile.getAbsolutePath());
@@ -147,7 +147,7 @@ public class SSHOutputHandler extends AbstractHandler {
                     List<String> outputList = null;
                     int retry = 3;
                     while (retry > 0) {
-                        outputList = cluster.listDirectory(jobExecutionContext.getOutputDir());
+                        outputList = remoteCluster.listDirectory(jobExecutionContext.getOutputDir());
                         if (outputList.size() > 0) {
                             break;
                         }
@@ -163,7 +163,7 @@ public class SSHOutputHandler extends AbstractHandler {
                             OutputDataObjectType actualParameter1 = (OutputDataObjectType) output.get(key);
                             if (DataType.URI == actualParameter1.getType()) {
                                 String downloadFile = actualParameter1.getValue();
-                                cluster.scpFrom(downloadFile, outputDataDir);
+                                remoteCluster.scpFrom(downloadFile, outputDataDir);
                                 String fileName = downloadFile.substring(downloadFile.lastIndexOf(File.separatorChar) + 1, downloadFile.length());
                                 String localFile = outputDataDir + File.separator + fileName;
                                 jobExecutionContext.addOutputFile(localFile);
@@ -198,7 +198,7 @@ public class SSHOutputHandler extends AbstractHandler {
                         break;
                     } else if (outputList.size() == 1) {//FIXME: Ultrascan case
                         String valueList = outputList.get(0);
-                        cluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
+                        remoteCluster.scpFrom(jobExecutionContext.getOutputDir() + File.separator + valueList, outputDataDir);
                         String outputPath = outputDataDir + File.separator + valueList;
                         jobExecutionContext.addOutputFile(outputPath);
                         actualParameter.setValue(outputPath);